diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index 72ea30f4d7..7e90b8c362 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -21,28 +21,44 @@
 * `snow snowpark init` and `snow streamlit init` commands are removed in favor of `snow init` command.
 * Removed deprecated flags from `snow snowpark` commands.
 * Default Python version for Snowpark functions and procedures was bumped to 3.10 from 3.8.
+* Snowpark commands
+  * `snow snowpark build` creates a .zip file for each specified artifact that is a directory. Non-Anaconda
+    dependencies are packaged once as `dependencies.zip`.
+  * `snow snowpark deploy` uploads all artifacts created during the build step. The dependencies zip is uploaded once to
+    every Snowpark stage specified in the project definition.
+  * The changes are compatible with V1 project definitions, though the resulting state (file layout) is different.
 
 ## Deprecations
- * Added deprecation warning for `native_app.package.scripts` in project definition file.
 
 ## New additions
-* Added support for `native_app.package.post_deploy` scripts in project definition file.
-  * These scripts will execute whenever a Native App Package is created or updated.
-  * Currently only supports SQL scripts: `post_deploy: [{sql_script: script.sql}]`
 * Added `snow spcs service execute-job` command, which supports creating and executing a job service in the current schema.
-* Added `snow app events` command to fetch logs and traces from local and customer app installations
-* Added support for project definition file defaults in templates
+* Added `snow app events` command to fetch logs and traces from local and customer app installations.
 * Added support for external access (api integrations and secrets) in Streamlit.
 * Support multiple Streamlit application in single snowflake.yml project definition file.
 
 ## Fixes and improvements
-* Fixed problem with whitespaces in `snow connection add` command
-* Added check for the correctness of token file and private key paths when addind a connection
+* Fixed problem with whitespaces in `snow connection add` command.
+* Added check for the correctness of token file and private key paths when adding a connection.
 * Fix the typo in spcs service name argument description. It is the identifier of the **service** instead of the **service pool**.
-* Fix error handling and improve messaging when no artifacts provided
+* Fix error handling and improve messaging when no artifacts are provided.
 * Improved error message for incompatible parameters.
+
+
+# v2.8.0
+## Backward incompatibility
+
+## Deprecations
+ * Added deprecation warning for `native_app.package.scripts` in project definition file.
+
+## New additions
+* Added support for project definition file defaults in templates.
+* Added support for `native_app.package.post_deploy` scripts in project definition file.
+  * These scripts will execute whenever a Native App Package is created or updated.
+  * Currently only supports SQL scripts: `post_deploy: [{sql_script: script.sql}]`.
+
+## Fixes and improvements
 * Fix return values of `snow snowpark list`, `describe` and `drop` commands.
-* Show warnings returned by Snowflake when `snow app run` is successful
+* Show warnings returned by Snowflake when `snow app run` is successful.
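To make the artifact layout described in the Snowpark notes above concrete, here is a minimal illustrative sketch (not code from this change; the stage name "dev_deployment" and project name "my_project" are hypothetical). It shows how a directory artifact maps to a .zip and where files land on the stage; the real logic lives in the Artefact helper added in snowpark_project_paths.py later in this diff.

import tempfile
from pathlib import Path


def post_build_name(artifact: Path) -> str:
    # Directory artifacts are zipped by `snow snowpark build`;
    # plain files keep their original name.
    return artifact.stem + ".zip" if artifact.is_dir() else artifact.name


def stage_path(stage: str, project_name: str, artifact: Path) -> str:
    # During deploy, each artifact lands under @<stage>/<project_name>/.
    return f"@{stage}/{project_name}/{post_build_name(artifact)}"


if __name__ == "__main__":
    with tempfile.TemporaryDirectory() as tmp:
        src = Path(tmp) / "src"
        src.mkdir()
        print(stage_path("dev_deployment", "my_project", src))
        # @dev_deployment/my_project/src.zip
        print(stage_path("dev_deployment", "my_project", Path("dependencies.zip")))
        # @dev_deployment/my_project/dependencies.zip

Because dependencies.zip is a single file shared by all entities, it is uploaded once per stage rather than once per function or procedure.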
# v2.7.0 diff --git a/src/snowflake/cli/_plugins/snowpark/commands.py b/src/snowflake/cli/_plugins/snowpark/commands.py index fbcf0f2bb1..009bd7b0d4 100644 --- a/src/snowflake/cli/_plugins/snowpark/commands.py +++ b/src/snowflake/cli/_plugins/snowpark/commands.py @@ -15,11 +15,12 @@ from __future__ import annotations import logging +from collections import defaultdict from enum import Enum from typing import Dict, List, Optional, Set, Tuple import typer -from click import ClickException +from click import ClickException, UsageError from snowflake.cli._plugins.object.commands import ( describe as object_describe, ) @@ -35,7 +36,6 @@ from snowflake.cli._plugins.object.manager import ObjectManager from snowflake.cli._plugins.snowpark import package_utils from snowflake.cli._plugins.snowpark.common import ( - UdfSprocIdentifier, check_if_replace_is_required, ) from snowflake.cli._plugins.snowpark.manager import FunctionManager, ProcedureManager @@ -44,7 +44,10 @@ AnacondaPackagesManager, ) from snowflake.cli._plugins.snowpark.package.commands import app as package_app -from snowflake.cli._plugins.snowpark.snowpark_package_paths import SnowparkPackagePaths +from snowflake.cli._plugins.snowpark.snowpark_project_paths import ( + Artefact, + SnowparkProjectPaths, +) from snowflake.cli._plugins.snowpark.snowpark_shared import ( AllowSharedLibrariesOption, IgnoreAnacondaOption, @@ -69,7 +72,6 @@ from snowflake.cli.api.console import cli_console from snowflake.cli.api.constants import ( DEFAULT_SIZE_LIMIT_MB, - DEPLOYMENT_STAGE, ObjectType, ) from snowflake.cli.api.exceptions import ( @@ -98,6 +100,7 @@ ) from snowflake.cli.api.secure_path import SecurePath from snowflake.connector import DictCursor, ProgrammingError +from snowflake.connector.cursor import SnowflakeCursor log = logging.getLogger(__name__) @@ -121,6 +124,11 @@ ) +SnowparkEntities = Dict[str, SnowparkEntityModel] +StageToArtefactMapping = Dict[str, set[Artefact]] +EntityToImportPathsMapping = Dict[str, set[str]] + + @app.command("deploy", requires_connection=True) @with_project_definition() def deploy( @@ -132,116 +140,139 @@ def deploy( """ Deploys procedures and functions defined in project. Deploying the project alters all objects defined in it. By default, if any of the objects exist already the commands will fail unless `--replace` flag is provided. - All deployed objects use the same artifact which is deployed only once. + Required artifacts are deployed before creating functions or procedures. Dependencies are deployed once to + every stage specified in definitions. """ cli_context = get_cli_context() pd = _get_v2_project_definition(cli_context) - paths = SnowparkPackagePaths.for_snowpark_project( - project_root=SecurePath(cli_context.project_root), - project_definition=pd, + snowpark_entities = get_snowpark_entities(pd) + project_name = ( + pd.defaults.project_name if pd.defaults and pd.defaults.project_name else "" + ) + project_paths = SnowparkProjectPaths( + project_root=cli_context.project_root, project_name=project_name ) - procedures: Dict[str, ProcedureEntityModel] = pd.get_entities_by_type("procedure") - functions: Dict[str, FunctionEntityModel] = pd.get_entities_by_type("function") + with cli_console.phase("Performing initial validation"): + if not snowpark_entities: + raise ClickException( + "No procedures or functions were specified in the project definition." 
+ ) + validate_all_artifacts_exists( + project_paths=project_paths, snowpark_entities=snowpark_entities + ) - if not procedures and not functions: - raise ClickException( - "No procedures or functions were specified in the project definition." + # Validate current state + with cli_console.phase("Checking remote state"): + om = ObjectManager() + _check_if_all_defined_integrations_exists(om, snowpark_entities) + existing_objects = check_for_existing_objects(om, replace, snowpark_entities) + + with cli_console.phase("Preparing required stages and artifacts"): + entities_to_imports_map, stages_to_artifact_map = build_artifacts_mappings( + project_paths=project_paths, + snowpark_entities=snowpark_entities, ) - if not paths.artifact_file.exists(): - raise ClickException( - "Artifact required for deploying the project does not exist in this directory. " - "Please use build command to create it." + create_stages_and_upload_artifacts(stages_to_artifact_map) + + # Create snowpark entities + with cli_console.phase("Creating Snowpark entities"): + snowflake_dependencies = _read_snowflake_requirements_file( + project_paths.snowflake_requirements ) + deploy_status = [] + for entity in snowpark_entities.values(): + cli_console.step(f"Creating {entity.type} {entity.fqn}") + operation_result = _deploy_single_object( + entity=entity, + existing_objects=existing_objects, + snowflake_dependencies=snowflake_dependencies, + entities_to_artifact_map=entities_to_imports_map, + ) + deploy_status.append(operation_result) - pm = ProcedureManager() - fm = FunctionManager() - om = ObjectManager() + return CollectionResult(deploy_status) - _check_if_all_defined_integrations_exists(om, functions, procedures) - existing_functions = _find_existing_objects(ObjectType.FUNCTION, functions, om) - existing_procedures = _find_existing_objects(ObjectType.PROCEDURE, procedures, om) +def validate_all_artifacts_exists( + project_paths: SnowparkProjectPaths, snowpark_entities: SnowparkEntities +): + for key, entity in snowpark_entities.items(): + for artefact in entity.artifacts: + path = project_paths.get_artefact_dto(artefact).post_build_path + if not path.exists(): + raise UsageError( + f"Artefact {path} required for {entity.type} {key} does not exist." + ) - if (existing_functions or existing_procedures) and not replace: - msg = "Following objects already exists. Consider using --replace.\n" - msg += "\n".join(f"function: {n}" for n in existing_functions) - msg += "\n" if existing_functions and existing_procedures else "" - msg += "\n".join(f"procedure: {n}" for n in existing_procedures) - raise ClickException(msg) - # Create stage - snowflake_dependencies = _read_snowflake_requrements_file( - paths.snowflake_requirements_file +def check_for_existing_objects( + om: ObjectManager, replace: bool, snowpark_entities: SnowparkEntities +) -> Dict[str, SnowflakeCursor]: + existing_objects: Dict[str, SnowflakeCursor] = _find_existing_objects( + snowpark_entities, om ) - stage_names = { - entity.stage for entity in [*functions.values(), *procedures.values()] - } - stage_manager = StageManager() + if existing_objects and not replace: + existing_entities = [snowpark_entities[e] for e in existing_objects] + msg = "Following objects already exists. 
Consider using --replace.\n" + msg += "\n".join(f"{e.type}: {e.entity_id}" for e in existing_entities) + raise ClickException(msg) + return existing_objects - # TODO: Raise error if stage name is not provided - for stage in stage_names: - cli_console.step(f"Creating stage: {stage}") - stage = FQN.from_string(stage).using_context() +def build_artifacts_mappings( + project_paths: SnowparkProjectPaths, snowpark_entities: SnowparkEntities +) -> Tuple[EntityToImportPathsMapping, StageToArtefactMapping]: + stages_to_artifact_map: StageToArtefactMapping = defaultdict(set) + entities_to_imports_map: EntityToImportPathsMapping = defaultdict(set) + for entity_id, entity in snowpark_entities.items(): + stage = entity.stage + required_artifacts = set() + for artefact in entity.artifacts: + artefact_dto = project_paths.get_artefact_dto(artefact) + required_artifacts.add(artefact_dto) + entities_to_imports_map[entity_id].add(artefact_dto.import_path(stage)) + stages_to_artifact_map[stage].update(required_artifacts) + + if project_paths.dependencies.exists(): + deps_artefact = project_paths.get_artefact_dto(project_paths.dependencies) + stages_to_artifact_map[stage].add(deps_artefact) + entities_to_imports_map[entity_id].add(deps_artefact.import_path(stage)) + return entities_to_imports_map, stages_to_artifact_map + + +def create_stages_and_upload_artifacts(stages_to_artifact_map: StageToArtefactMapping): + stage_manager = StageManager() + for stage, artifacts in stages_to_artifact_map.items(): + cli_console.step(f"Creating (if not exists) stage: {stage}") + stage = FQN.from_stage(stage).using_context() stage_manager.create(fqn=stage, comment="deployments managed by Snowflake CLI") - artifact_stage_directory = get_app_stage_path(stage, pd.defaults.project_name) - artifact_stage_target = ( - f"{artifact_stage_directory}/{paths.artifact_file.path.name}" - ) - - stage_manager.put( - local_path=paths.artifact_file.path, - stage_path=artifact_stage_directory, - overwrite=True, - ) - - deploy_status = [] - # Procedures - for procedure in procedures.values(): - operation_result = _deploy_single_object( - manager=pm, - object_type=ObjectType.PROCEDURE, - object_definition=procedure, - existing_objects=existing_procedures, - snowflake_dependencies=snowflake_dependencies, - stage_artifact_path=artifact_stage_target, - ) - deploy_status.append(operation_result) - - # Functions - for function in functions.values(): - operation_result = _deploy_single_object( - manager=fm, - object_type=ObjectType.FUNCTION, - object_definition=function, - existing_objects=existing_functions, - snowflake_dependencies=snowflake_dependencies, - stage_artifact_path=artifact_stage_target, - ) - deploy_status.append(operation_result) - - return CollectionResult(deploy_status) + for artefact in artifacts: + cli_console.step( + f"Uploading {artefact.post_build_path.name} to {artefact.upload_path(stage)}" + ) + stage_manager.put( + local_path=artefact.post_build_path, + stage_path=artefact.upload_path(stage), + overwrite=True, + ) def _find_existing_objects( - object_type: ObjectType, - objects: Dict[str, SnowparkEntityModel], + objects: SnowparkEntities, om: ObjectManager, -): +) -> Dict[str, SnowflakeCursor]: existing_objects = {} - for object_name, object_definition in objects.items(): - identifier = UdfSprocIdentifier.from_definition( - object_definition - ).identifier_with_arg_types + for entity_id, entity in objects.items(): + identifier = entity.udf_sproc_identifier.identifier_with_arg_types try: current_state = om.describe( - 
object_type=object_type.value.sf_name, + object_type=entity.type, fqn=FQN.from_string(identifier), ) - existing_objects[identifier] = current_state + existing_objects[entity_id] = current_state except ProgrammingError: pass return existing_objects @@ -249,8 +280,7 @@ def _find_existing_objects( def _check_if_all_defined_integrations_exists( om: ObjectManager, - functions: Dict[str, FunctionEntityModel], - procedures: Dict[str, ProcedureEntityModel], + snowpark_entities: Dict[str, FunctionEntityModel | ProcedureEntityModel], ): existing_integrations = { i["name"].lower() @@ -258,7 +288,7 @@ def _check_if_all_defined_integrations_exists( if i["type"] == "EXTERNAL_ACCESS" } declared_integration: Set[str] = set() - for object_definition in [*functions.values(), *procedures.values()]: + for object_definition in snowpark_entities.values(): external_access_integrations = { s.lower() for s in object_definition.external_access_integrations } @@ -276,84 +306,73 @@ def _check_if_all_defined_integrations_exists( ) -def get_app_stage_path(stage_name: Optional[str | FQN], project_name: str) -> str: - artifact_stage_directory = f"@{(stage_name or DEPLOYMENT_STAGE)}/{project_name}" - return artifact_stage_directory - - def _deploy_single_object( - manager: FunctionManager | ProcedureManager, - object_type: ObjectType, - object_definition: SnowparkEntityModel, - existing_objects: Dict[str, Dict], + entity: SnowparkEntityModel, + existing_objects: Dict[str, SnowflakeCursor], snowflake_dependencies: List[str], - stage_artifact_path: str, + entities_to_artifact_map: EntityToImportPathsMapping, ): - - identifiers = UdfSprocIdentifier.from_definition(object_definition) - - log.info( - "Deploying %s: %s", object_type, identifiers.identifier_with_arg_names_types - ) - - handler = object_definition.handler - returns = object_definition.returns - imports = object_definition.imports - external_access_integrations = object_definition.external_access_integrations - runtime_ver = object_definition.runtime + object_type = entity.get_type() + is_procedure = isinstance(entity, ProcedureEntityModel) + + handler = entity.handler + returns = entity.returns + imports = entity.imports + external_access_integrations = entity.external_access_integrations + runtime_ver = entity.runtime execute_as_caller = None - if object_type == ObjectType.PROCEDURE: - execute_as_caller = object_definition.execute_as_caller + if is_procedure: + execute_as_caller = entity.execute_as_caller replace_object = False - object_exists = identifiers.identifier_with_arg_types in existing_objects + object_exists = entity.entity_id in existing_objects if object_exists: replace_object = check_if_replace_is_required( object_type=object_type, - current_state=existing_objects[identifiers.identifier_with_arg_types], + current_state=existing_objects[entity.entity_id], handler=handler, return_type=returns, snowflake_dependencies=snowflake_dependencies, external_access_integrations=external_access_integrations, imports=imports, - stage_artifact_file=stage_artifact_path, + stage_artifact_files=entities_to_artifact_map[entity.entity_id], runtime_ver=runtime_ver, execute_as_caller=execute_as_caller, ) if object_exists and not replace_object: return { - "object": identifiers.identifier_with_arg_names_types_defaults, + "object": entity.udf_sproc_identifier.identifier_with_arg_names_types_defaults, "type": str(object_type), "status": "packages updated", } create_or_replace_kwargs = { - "identifier": identifiers, + "identifier": entity.udf_sproc_identifier, "handler": 
handler, "return_type": returns, - "artifact_file": stage_artifact_path, + "artifact_files": entities_to_artifact_map[entity.entity_id], "packages": snowflake_dependencies, - "runtime": object_definition.runtime, - "external_access_integrations": object_definition.external_access_integrations, - "secrets": object_definition.secrets, + "runtime": entity.runtime, + "external_access_integrations": entity.external_access_integrations, + "secrets": entity.secrets, "imports": imports, } - if object_type == ObjectType.PROCEDURE: - create_or_replace_kwargs[ - "execute_as_caller" - ] = object_definition.execute_as_caller + if is_procedure: + create_or_replace_kwargs["execute_as_caller"] = entity.execute_as_caller + + manager = ProcedureManager() if is_procedure else FunctionManager() manager.create_or_replace(**create_or_replace_kwargs) status = "created" if not object_exists else "definition updated" return { - "object": identifiers.identifier_with_arg_names_types_defaults, + "object": entity.udf_sproc_identifier.identifier_with_arg_names_types_defaults, "type": str(object_type), "status": status, } -def _read_snowflake_requrements_file(file_path: SecurePath): +def _read_snowflake_requirements_file(file_path: SecurePath): if not file_path.exists(): return [] return file_path.read_text(file_size_limit_mb=DEFAULT_SIZE_LIMIT_MB).splitlines() @@ -369,26 +388,27 @@ def build( **options, ) -> CommandResult: """ - Builds the Snowpark project as a `.zip` archive that can be used by `deploy` command. - The archive is built using only the `artifacts` directory specified in the project file. + Builds artifacts required for the Snowpark project. The artifacts can be used by `deploy` command. + For each directory in artifacts a .zip file is created. All non-anaconda dependencies are packaged in + dependencies.zip file. 
""" cli_context = get_cli_context() pd = _get_v2_project_definition(cli_context) - snowpark_paths = SnowparkPackagePaths.for_snowpark_project( - project_root=SecurePath(cli_context.project_root), - project_definition=pd, + project_paths = SnowparkProjectPaths( + project_root=cli_context.project_root, project_name=None ) - log.info("Building package using sources from:") - log.info(",".join(str(s) for s in snowpark_paths.sources)) anaconda_packages_manager = AnacondaPackagesManager() - with SecurePath.temporary_directory() as packages_dir: - if snowpark_paths.defined_requirements_file.exists(): - log.info("Resolving any requirements from requirements.txt...") + # Resolve dependencies + if project_paths.requirements.exists(): + with ( + cli_console.phase("Resolving dependencies from requirements.txt"), + SecurePath.temporary_directory() as temp_deps_dir, + ): requirements = package_utils.parse_requirements( - requirements_file=snowpark_paths.defined_requirements_file, + requirements_file=project_paths.requirements, ) anaconda_packages = ( AnacondaPackages.empty() @@ -397,7 +417,7 @@ def build( ) download_result = package_utils.download_unavailable_packages( requirements=requirements, - target_dir=packages_dir, + target_dir=temp_deps_dir, anaconda_packages=anaconda_packages, skip_version_check=skip_version_check, pip_index_url=index_url, @@ -416,27 +436,38 @@ def build( ) if download_result.anaconda_packages: anaconda_packages.write_requirements_file_in_snowflake_format( # type: ignore - file_path=snowpark_paths.snowflake_requirements_file, + file_path=project_paths.snowflake_requirements, requirements=download_result.anaconda_packages, ) - zip_dir( - source=snowpark_paths.sources_paths, - dest_zip=snowpark_paths.artifact_file.path, - ) - if any(packages_dir.iterdir()): - # if any packages were generated, append them to the .zip - zip_dir( - source=packages_dir.path, - dest_zip=snowpark_paths.artifact_file.path, - mode="a", - ) + if any(temp_deps_dir.path.iterdir()): + cli_console.step(f"Creating {project_paths.dependencies.name}") + zip_dir( + source=temp_deps_dir.path, + dest_zip=project_paths.dependencies, + ) + else: + cli_console.step(f"No external dependencies.") - log.info("Package now ready: %s", snowpark_paths.artifact_file.path) + artifacts = set() + for entity in get_snowpark_entities(pd).values(): + artifacts.update(entity.artifacts) - return MessageResult( - f"Build done. 
Artifact path: {snowpark_paths.artifact_file.path}" - ) + with cli_console.phase("Preparing artifacts for source code"): + for artefact in artifacts: + artefact_dto = project_paths.get_artefact_dto(artefact) + artefact_dto.build() + + return MessageResult(f"Build done.") + + +def get_snowpark_entities( + pd: ProjectDefinition, +) -> Dict[str, ProcedureEntityModel | FunctionEntityModel]: + procedures: Dict[str, ProcedureEntityModel] = pd.get_entities_by_type("procedure") + functions: Dict[str, FunctionEntityModel] = pd.get_entities_by_type("function") + snowpark_entities = {**procedures, **functions} + return snowpark_entities class _SnowparkObject(Enum): @@ -514,7 +545,7 @@ def describe( def _migrate_v1_snowpark_to_v2(pd: ProjectDefinition): if not pd.snowpark: raise NoProjectDefinitionError( - project_type="snowpark", project_file=get_cli_context().project_root + project_type="snowpark", project_root=get_cli_context().project_root ) data: dict = { @@ -535,7 +566,7 @@ def _migrate_v1_snowpark_to_v2(pd: ProjectDefinition): v2_entity = { "type": "function" if isinstance(entity, FunctionSchema) else "procedure", "stage": pd.snowpark.stage_name, - "artifacts": pd.snowpark.src, + "artifacts": [pd.snowpark.src], "handler": entity.handler, "returns": entity.returns, "signature": entity.signature, diff --git a/src/snowflake/cli/_plugins/snowpark/common.py b/src/snowflake/cli/_plugins/snowpark/common.py index 4e80b99de2..256631502c 100644 --- a/src/snowflake/cli/_plugins/snowpark/common.py +++ b/src/snowflake/cli/_plugins/snowpark/common.py @@ -22,9 +22,8 @@ generate_deploy_stage_name, ) from snowflake.cli.api.constants import ObjectType -from snowflake.cli.api.identifiers import FQN from snowflake.cli.api.project.schemas.entities.snowpark_entity import ( - SnowparkEntityModel, + UdfSprocIdentifier, ) from snowflake.cli.api.sql_execution import SqlExecutionMixin from snowflake.connector.cursor import SnowflakeCursor @@ -33,14 +32,14 @@ def check_if_replace_is_required( - object_type: ObjectType, + object_type: str, current_state, handler: str, return_type: str, snowflake_dependencies: List[str], external_access_integrations: List[str], imports: List[str], - stage_artifact_file: str, + stage_artifact_files: set[str], runtime_ver: Optional[str] = None, execute_as_caller: Optional[bool] = None, ) -> bool: @@ -81,7 +80,7 @@ def check_if_replace_is_required( ) return True - if _compare_imports(resource_json, imports, stage_artifact_file): + if _compare_imports(resource_json, imports, stage_artifact_files): log.info("Imports do not match. 
Replacing the %s", object_type) return True @@ -173,7 +172,7 @@ def create_query( identifier: UdfSprocIdentifier, return_type: str, handler: str, - artifact_file: str, + artifact_files: set[str], packages: List[str], imports: List[str], external_access_integrations: Optional[List[str]] = None, @@ -181,7 +180,7 @@ def create_query( runtime: Optional[str] = None, execute_as_caller: bool = False, ) -> str: - imports.append(artifact_file) + imports.extend(artifact_files) imports = [f"'{x}'" for x in imports] packages_list = ",".join(f"'{p}'" for p in packages) @@ -214,83 +213,14 @@ def create_query( return "\n".join(query) -def _is_signature_type_a_string(sig_type: str) -> bool: - return sig_type.lower() in ["string", "varchar"] - - -class UdfSprocIdentifier: - def __init__(self, identifier: FQN, arg_names, arg_types, arg_defaults): - self._identifier = identifier - self._arg_names = arg_names - self._arg_types = arg_types - self._arg_defaults = arg_defaults - - def _identifier_from_signature(self, sig: List[str], for_sql: bool = False): - signature = self._comma_join(sig) - id_ = self._identifier.sql_identifier if for_sql else self._identifier - return f"{id_}({signature})" - - @staticmethod - def _comma_join(*args): - return ", ".join(*args) - - @property - def identifier_with_arg_names(self): - return self._identifier_from_signature(self._arg_names) - - @property - def identifier_with_arg_types(self): - return self._identifier_from_signature(self._arg_types) - - @property - def identifier_with_arg_names_types(self): - sig = [f"{n} {t}" for n, t in zip(self._arg_names, self._arg_types)] - return self._identifier_from_signature(sig) - - @property - def identifier_with_arg_names_types_defaults(self): - return self._identifier_from_signature(self._full_signature()) - - def _full_signature(self): - sig = [] - for name, _type, _default in zip( - self._arg_names, self._arg_types, self._arg_defaults - ): - s = f"{name} {_type}" - if _default: - if _is_signature_type_a_string(_type): - _default = f"'{_default}'" - s += f" default {_default}" - sig.append(s) - return sig - - @property - def identifier_for_sql(self): - return self._identifier_from_signature(self._full_signature(), for_sql=True) - - @classmethod - def from_definition(cls, udf_sproc: SnowparkEntityModel): - names = [] - types = [] - defaults = [] - if udf_sproc.signature and udf_sproc.signature != "null": - for arg in udf_sproc.signature: - names.append(arg.name) - types.append(arg.arg_type) - defaults.append(arg.default) - - identifier = udf_sproc.fqn.using_context() - return cls(identifier, names, types, defaults) - - def _compare_imports( - resource_json: dict, imports: List[str], artifact_file: str + resource_json: dict, imports: List[str], artifact_files: set[str] ) -> bool: pattern = re.compile(r"(?:\[@?\w+_\w+\.)?(\w+(?:/\w+)+\.\w+)(?:\])?") project_imports = { imp - for import_string in [*imports, artifact_file] + for import_string in [*imports, *artifact_files] for imp in pattern.findall(import_string.lower()) } diff --git a/src/snowflake/cli/_plugins/snowpark/manager.py b/src/snowflake/cli/_plugins/snowpark/manager.py index 942a8ed209..1bbbf16106 100644 --- a/src/snowflake/cli/_plugins/snowpark/manager.py +++ b/src/snowflake/cli/_plugins/snowpark/manager.py @@ -19,9 +19,11 @@ from snowflake.cli._plugins.snowpark.common import ( SnowparkObjectManager, - UdfSprocIdentifier, ) from snowflake.cli.api.constants import ObjectType +from snowflake.cli.api.project.schemas.entities.snowpark_entity import ( + UdfSprocIdentifier, +) from 
snowflake.connector.cursor import SnowflakeCursor log = logging.getLogger(__name__) @@ -41,7 +43,7 @@ def create_or_replace( identifier: UdfSprocIdentifier, return_type: str, handler: str, - artifact_file: str, + artifact_files: set[str], packages: List[str], imports: List[str], external_access_integrations: Optional[List[str]] = None, @@ -51,13 +53,13 @@ def create_or_replace( log.debug( "Creating function %s using @%s", identifier.identifier_with_arg_names_types_defaults, - artifact_file, + artifact_files, ) query = self.create_query( identifier, return_type, handler, - artifact_file, + artifact_files, packages, imports, external_access_integrations, @@ -81,7 +83,7 @@ def create_or_replace( identifier: UdfSprocIdentifier, return_type: str, handler: str, - artifact_file: str, + artifact_files: set[str], packages: List[str], imports: List[str], external_access_integrations: Optional[List[str]] = None, @@ -92,13 +94,13 @@ def create_or_replace( log.debug( "Creating procedure %s using @%s", identifier.identifier_with_arg_names_types_defaults, - artifact_file, + artifact_files, ) query = self.create_query( identifier, return_type, handler, - artifact_file, + artifact_files, packages, imports, external_access_integrations, diff --git a/src/snowflake/cli/_plugins/snowpark/snowpark_package_paths.py b/src/snowflake/cli/_plugins/snowpark/snowpark_package_paths.py deleted file mode 100644 index 321db47904..0000000000 --- a/src/snowflake/cli/_plugins/snowpark/snowpark_package_paths.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-
-from dataclasses import dataclass
-from pathlib import Path
-from typing import List
-
-from snowflake.cli.api.project.schemas.project_definition import DefinitionV20
-from snowflake.cli.api.secure_path import SecurePath
-
-_DEFINED_REQUIREMENTS = "requirements.txt"
-_REQUIREMENTS_SNOWFLAKE = "requirements.snowflake.txt"
-
-
-@dataclass
-class SnowparkPackagePaths:
-    sources: List[SecurePath]
-    artifact_file: SecurePath
-    defined_requirements_file: SecurePath = SecurePath(_DEFINED_REQUIREMENTS)
-    snowflake_requirements_file: SecurePath = SecurePath(_REQUIREMENTS_SNOWFLAKE)
-
-    @classmethod
-    def for_snowpark_project(
-        cls, project_root: SecurePath, project_definition: DefinitionV20
-    ) -> "SnowparkPackagePaths":
-        sources = set()
-        entities = project_definition.get_entities_by_type(
-            "function"
-        ) | project_definition.get_entities_by_type("procedure")
-        for name, entity in entities.items():
-            sources.add(entity.artifacts)
-
-        return cls(
-            sources=[
-                cls._get_snowpark_project_source_absolute_path(
-                    project_root, SecurePath(source)
-                )
-                for source in sources
-            ],
-            artifact_file=cls._get_snowpark_project_artifact_absolute_path(
-                project_root=project_root,
-            ),
-            defined_requirements_file=project_root / _DEFINED_REQUIREMENTS,
-            snowflake_requirements_file=project_root / _REQUIREMENTS_SNOWFLAKE,
-        )
-
-    @classmethod
-    def _get_snowpark_project_source_absolute_path(
-        cls, project_root: SecurePath, defined_source_path: SecurePath
-    ) -> SecurePath:
-        if defined_source_path.path.is_absolute():
-            return defined_source_path
-        return SecurePath((project_root / defined_source_path.path).path.resolve())
-
-    @classmethod
-    def _get_snowpark_project_artifact_absolute_path(
-        cls, project_root: SecurePath
-    ) -> SecurePath:
-
-        artifact_file = project_root / "app.zip"
-        return artifact_file
-
-    @property
-    def sources_paths(self) -> List[Path]:
-        return [source.path for source in self.sources]
diff --git a/src/snowflake/cli/_plugins/snowpark/snowpark_project_paths.py b/src/snowflake/cli/_plugins/snowpark/snowpark_project_paths.py
new file mode 100644
index 0000000000..23392336fb
--- /dev/null
+++ b/src/snowflake/cli/_plugins/snowpark/snowpark_project_paths.py
@@ -0,0 +1,106 @@
+# Copyright (c) 2024 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from dataclasses import dataclass
+from pathlib import Path
+
+from snowflake.cli._plugins.snowpark.zipper import zip_dir
+from snowflake.cli.api.console import cli_console
+from snowflake.cli.api.constants import DEPLOYMENT_STAGE
+from snowflake.cli.api.identifiers import FQN
+from snowflake.cli.api.secure_path import SecurePath
+
+
+@dataclass
+class SnowparkProjectPaths:
+    """
+    This class allows you to manage file paths related to a given project.
+ """ + + project_root: Path + project_name: str | None = None + + def path_relative_to_root(self, artifact_path: Path) -> Path: + if artifact_path.is_absolute(): + return artifact_path + return (self.project_root / artifact_path).resolve() + + def get_artefact_dto(self, artifact_path: Path) -> Artefact: + return Artefact( + project_name=self.project_name, + path=self.path_relative_to_root(artifact_path), + ) + + @property + def snowflake_requirements(self) -> SecurePath: + return SecurePath( + self.path_relative_to_root(Path("requirements.snowflake.txt")) + ) + + @property + def requirements(self) -> SecurePath: + return SecurePath(self.path_relative_to_root(Path("requirements.txt"))) + + @property + def dependencies(self) -> Path: + return self.path_relative_to_root(Path("dependencies.zip")) + + +@dataclass(unsafe_hash=True) +class Artefact: + """Helper for getting paths related to given artefact.""" + + path: Path + project_name: str | None = None + + @property + def _artefact_name(self) -> str: + if self.path.is_dir(): + return self.path.stem + ".zip" + return self.path.name + + @property + def post_build_path(self) -> Path: + """ + Returns post-build artefact path. Directories are mapped to corresponding .zip files. + """ + return self.path.parent / self._artefact_name + + def upload_path(self, stage: FQN | str | None) -> str: + """ + Path on stage to which the artefact should be uploaded. + """ + stage = stage or DEPLOYMENT_STAGE + if isinstance(stage, str): + stage = FQN.from_stage(stage).using_context() + + artifact_stage_directory = f"@{stage}/" + if self.project_name: + artifact_stage_directory += f"{self.project_name}/" + return artifact_stage_directory + + def import_path(self, stage: FQN | str | None) -> str: + """Path for UDF/sproc imports clause.""" + return self.upload_path(stage) + self._artefact_name + + def build(self) -> None: + """Build the artefact. Applies only to directories. 
Files are untouched.""" + if not self.path.is_dir(): + return + cli_console.step(f"Creating: {self.post_build_path.name}") + zip_dir( + source=self.path, + dest_zip=self.post_build_path, + ) diff --git a/src/snowflake/cli/_plugins/streamlit/commands.py b/src/snowflake/cli/_plugins/streamlit/commands.py index e2f4eedcfe..5bd30c24b4 100644 --- a/src/snowflake/cli/_plugins/streamlit/commands.py +++ b/src/snowflake/cli/_plugins/streamlit/commands.py @@ -137,7 +137,7 @@ def streamlit_deploy( if not pd.meets_version_requirement("2"): if not pd.streamlit: raise NoProjectDefinitionError( - project_type="streamlit", project_file=cli_context.project_root + project_type="streamlit", project_root=cli_context.project_root ) pd = _migrate_v1_streamlit_to_v2(pd) @@ -147,7 +147,7 @@ def streamlit_deploy( if not streamlits: raise NoProjectDefinitionError( - project_type="streamlit", project_file=cli_context.project_root + project_type="streamlit", project_root=cli_context.project_root ) if entity_id and entity_id not in streamlits: diff --git a/src/snowflake/cli/api/cli_global_context.py b/src/snowflake/cli/api/cli_global_context.py index 6e2a08986d..01edaaa95d 100644 --- a/src/snowflake/cli/api/cli_global_context.py +++ b/src/snowflake/cli/api/cli_global_context.py @@ -380,8 +380,8 @@ def project_definition(self) -> ProjectDefinition | None: return self._manager.project_definition @property - def project_root(self): - return self._manager.project_root + def project_root(self) -> Path: + return Path(self._manager.project_root) @property def template_context(self) -> dict: diff --git a/src/snowflake/cli/api/exceptions.py b/src/snowflake/cli/api/exceptions.py index 17745a2c3f..e1a39f2dd4 100644 --- a/src/snowflake/cli/api/exceptions.py +++ b/src/snowflake/cli/api/exceptions.py @@ -101,9 +101,9 @@ def __init__( class NoProjectDefinitionError(ClickException): - def __init__(self, project_type: str, project_file: str): + def __init__(self, project_type: str, project_root: str | Path): super().__init__( - f"No {project_type} project definition found in {project_file}" + f"No {project_type} project definition found in {project_root}" ) diff --git a/src/snowflake/cli/api/project/project_verification.py b/src/snowflake/cli/api/project/project_verification.py index 7abf3f6100..615fa71c09 100644 --- a/src/snowflake/cli/api/project/project_verification.py +++ b/src/snowflake/cli/api/project/project_verification.py @@ -19,5 +19,5 @@ def assert_project_type(project_type: str): cli_context = get_cli_context() if not getattr(cli_context.project_definition, project_type, None): raise NoProjectDefinitionError( - project_type=project_type, project_file=cli_context.project_root + project_type=project_type, project_root=cli_context.project_root ) diff --git a/src/snowflake/cli/api/project/schemas/entities/snowpark_entity.py b/src/snowflake/cli/api/project/schemas/entities/snowpark_entity.py index 2b636eff19..2b9c92118d 100644 --- a/src/snowflake/cli/api/project/schemas/entities/snowpark_entity.py +++ b/src/snowflake/cli/api/project/schemas/entities/snowpark_entity.py @@ -14,9 +14,11 @@ from __future__ import annotations +from pathlib import Path from typing import List, Literal, Optional, Union from pydantic import Field, field_validator +from snowflake.cli.api.identifiers import FQN from snowflake.cli.api.project.schemas.entities.common import ( EntityModelBase, ExternalAccessBaseModel, @@ -44,7 +46,7 @@ class SnowparkEntityModel(EntityModelBase, ExternalAccessBaseModel): default=[], ) stage: str = Field(title="Stage in which 
artifacts will be stored") - artifacts: str = Field(title="Folder where your code should be located") + artifacts: List[Path] = Field(title="List of required sources") @field_validator("runtime") @classmethod @@ -53,6 +55,18 @@ def convert_runtime(cls, runtime_input: Union[str, float]) -> str: return str(runtime_input) return runtime_input + @field_validator("artifacts") + @classmethod + def validate_artifacts(cls, artifacts: List[Path]) -> List[Path]: + for artefact in artifacts: + if "*" in str(artefact): + raise ValueError("Glob patterns not supported for Snowpark artifacts.") + return artifacts + + @property + def udf_sproc_identifier(self) -> UdfSprocIdentifier: + return UdfSprocIdentifier.from_definition(self) + class ProcedureEntityModel(SnowparkEntityModel): type: Literal["procedure"] = DiscriminatorField() # noqa: A003 @@ -65,3 +79,71 @@ class ProcedureEntityModel(SnowparkEntityModel): class FunctionEntityModel(SnowparkEntityModel): type: Literal["function"] = DiscriminatorField() # noqa: A003 + + +class UdfSprocIdentifier: + def __init__(self, identifier: FQN, arg_names, arg_types, arg_defaults): + self._identifier = identifier + self._arg_names = arg_names + self._arg_types = arg_types + self._arg_defaults = arg_defaults + + def _identifier_from_signature(self, sig: List[str], for_sql: bool = False): + signature = self._comma_join(sig) + id_ = self._identifier.sql_identifier if for_sql else self._identifier + return f"{id_}({signature})" + + @staticmethod + def _comma_join(*args): + return ", ".join(*args) + + @property + def identifier_with_arg_names(self): + return self._identifier_from_signature(self._arg_names) + + @property + def identifier_with_arg_types(self): + return self._identifier_from_signature(self._arg_types) + + @property + def identifier_with_arg_names_types(self): + sig = [f"{n} {t}" for n, t in zip(self._arg_names, self._arg_types)] + return self._identifier_from_signature(sig) + + @property + def identifier_with_arg_names_types_defaults(self): + return self._identifier_from_signature(self._full_signature()) + + def _is_signature_type_a_string(self, sig_type: str) -> bool: + return sig_type.lower() in ["string", "varchar"] + + def _full_signature(self): + sig = [] + for name, _type, _default in zip( + self._arg_names, self._arg_types, self._arg_defaults + ): + s = f"{name} {_type}" + if _default: + if self._is_signature_type_a_string(_type): + _default = f"'{_default}'" + s += f" default {_default}" + sig.append(s) + return sig + + @property + def identifier_for_sql(self): + return self._identifier_from_signature(self._full_signature(), for_sql=True) + + @classmethod + def from_definition(cls, udf_sproc: SnowparkEntityModel): + names = [] + types = [] + defaults = [] + if udf_sproc.signature and udf_sproc.signature != "null": + for arg in udf_sproc.signature: + names.append(arg.name) # type:ignore + types.append(arg.arg_type) # type:ignore + defaults.append(arg.default) # type:ignore + + identifier = udf_sproc.fqn.using_context() + return cls(identifier, names, types, defaults) diff --git a/tests/__snapshots__/test_help_messages.ambr b/tests/__snapshots__/test_help_messages.ambr index 4185083307..0f0b599245 100644 --- a/tests/__snapshots__/test_help_messages.ambr +++ b/tests/__snapshots__/test_help_messages.ambr @@ -2922,9 +2922,9 @@ Usage: default snowpark build [OPTIONS] - Builds the Snowpark project as a `.zip` archive that can be used by `deploy` - command. The archive is built using only the `artifacts` directory specified - in the project file. 
+ Builds artifacts required for the Snowpark project. The artifacts can be used + by `deploy` command. For each directory in artifacts a .zip file is created. + All non-anaconda dependencies are packaged in dependencies.zip file. +- Options --------------------------------------------------------------------+ | --ignore-anaconda Does not lookup packages on | @@ -3014,8 +3014,9 @@ Deploys procedures and functions defined in project. Deploying the project alters all objects defined in it. By default, if any of the objects exist - already the commands will fail unless `--replace` flag is provided. All - deployed objects use the same artifact which is deployed only once. + already the commands will fail unless `--replace` flag is provided. Required + artifacts are deployed before creating functions or procedures. Dependencies + are deployed once to every stage specified in definitions. +- Options --------------------------------------------------------------------+ | --replace Replaces procedure or function, even if no detected | @@ -3664,14 +3665,16 @@ | --help -h Show this message and exit. | +------------------------------------------------------------------------------+ +- Commands -------------------------------------------------------------------+ - | build Builds the Snowpark project as a `.zip` archive that can be used | - | by `deploy` command. The archive is built using only the | - | `artifacts` directory specified in the project file. | + | build Builds artifacts required for the Snowpark project. The artifacts | + | can be used by `deploy` command. For each directory in artifacts | + | a .zip file is created. All non-anaconda dependencies are | + | packaged in dependencies.zip file. | | deploy Deploys procedures and functions defined in project. Deploying | | the project alters all objects defined in it. By default, if any | | of the objects exist already the commands will fail unless | - | `--replace` flag is provided. All deployed objects use the same | - | artifact which is deployed only once. | + | `--replace` flag is provided. Required artifacts are deployed | + | before creating functions or procedures. Dependencies are | + | deployed once to every stage specified in definitions. | | describe Provides description of a procedure or function. | | drop Drop procedure or function. | | execute Executes a procedure or function in a specified environment. | @@ -7874,14 +7877,16 @@ | --help -h Show this message and exit. | +------------------------------------------------------------------------------+ +- Commands -------------------------------------------------------------------+ - | build Builds the Snowpark project as a `.zip` archive that can be used | - | by `deploy` command. The archive is built using only the | - | `artifacts` directory specified in the project file. | + | build Builds artifacts required for the Snowpark project. The artifacts | + | can be used by `deploy` command. For each directory in artifacts | + | a .zip file is created. All non-anaconda dependencies are | + | packaged in dependencies.zip file. | | deploy Deploys procedures and functions defined in project. Deploying | | the project alters all objects defined in it. By default, if any | | of the objects exist already the commands will fail unless | - | `--replace` flag is provided. All deployed objects use the same | - | artifact which is deployed only once. | + | `--replace` flag is provided. Required artifacts are deployed | + | before creating functions or procedures. 
Dependencies are | + | deployed once to every stage specified in definitions. | | describe Provides description of a procedure or function. | | drop Drop procedure or function. | | execute Executes a procedure or function in a specified environment. | diff --git a/tests/project/__snapshots__/test_config.ambr b/tests/project/__snapshots__/test_config.ambr index e8a5534e3d..01dd986e7d 100644 --- a/tests/project/__snapshots__/test_config.ambr +++ b/tests/project/__snapshots__/test_config.ambr @@ -246,7 +246,7 @@ 'procedures': list([ ]), 'project_name': 'my_snowpark_project', - 'src': 'app/', + 'src': 'app.py', 'stage_name': 'dev_deployment', }), 'streamlit': None, @@ -432,7 +432,7 @@ 'procedures': list([ ]), 'project_name': 'my_snowpark_project', - 'src': 'app/', + 'src': 'app.py', 'stage_name': 'dev_deployment', }), 'streamlit': None, @@ -474,7 +474,7 @@ 'procedures': list([ ]), 'project_name': 'my_snowpark_project', - 'src': 'app/', + 'src': 'app.py', 'stage_name': 'dev_deployment', }), 'streamlit': None, @@ -516,7 +516,7 @@ }), ]), 'project_name': 'my_snowpark_project', - 'src': 'app/', + 'src': 'app.py', 'stage_name': 'dev_deployment', }), 'streamlit': None, @@ -704,7 +704,7 @@ }), ]), 'project_name': 'my_snowpark_project', - 'src': 'app/', + 'src': 'app.py', 'stage_name': 'dev_deployment', }), 'streamlit': None, @@ -758,7 +758,7 @@ }), ]), 'project_name': 'my_snowpark_project', - 'src': 'app/', + 'src': 'app.py', 'stage_name': 'dev_deployment', }), 'streamlit': None, diff --git a/tests/project/test_config.py b/tests/project/test_config.py index 4bd789d41d..391b612e5b 100644 --- a/tests/project/test_config.py +++ b/tests/project/test_config.py @@ -104,7 +104,6 @@ def test_does_not_accept_unknown_fields(project_definition_files): "snowpark_procedure_fully_qualified_name", "snowpark_procedure_secrets_without_external_access", "snowpark_procedures", - "snowpark_procedures_coverage", "streamlit_full_definition", ], indirect=True, diff --git a/tests/project/test_project_definition_v2.py b/tests/project/test_project_definition_v2.py index 58192c9e08..16ded60fa4 100644 --- a/tests/project/test_project_definition_v2.py +++ b/tests/project/test_project_definition_v2.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from pathlib import Path import pytest from snowflake.cli._plugins.snowpark.commands import _migrate_v1_snowpark_to_v2 @@ -179,7 +180,7 @@ "returns": "string", "signature": [{"name": "name", "type": "string"}], "runtime": "3.10", - "artifacts": "src", + "artifacts": ["src"], } }, }, @@ -196,7 +197,7 @@ "returns": "string", "signature": [{"name": "name", "type": "string"}], "runtime": "3.10", - "artifacts": "src", + "artifacts": ["src"], "execute_as_caller": True, } }, @@ -213,7 +214,7 @@ "returns": "string", "signature": [{"name": "name", "type": "string"}], "runtime": "3.10", - "artifacts": "src", + "artifacts": ["src"], "execute_as_caller": True, } }, @@ -350,7 +351,6 @@ def test_entity_model_to_entity_map(): [ "snowpark_functions", "snowpark_procedures", - "snowpark_procedures_coverage", "snowpark_function_fully_qualified_name", ], ) @@ -375,13 +375,13 @@ def test_v1_to_v2_conversion( for v1_procedure in definition_v1.snowpark.procedures: v2_procedure = definition_v2.entities.get(v1_procedure.name) assert v2_procedure - assert v2_procedure.artifacts == definition_v1.snowpark.src + assert v2_procedure.artifacts == [Path(definition_v1.snowpark.src)] _assert_entities_are_equal(v1_procedure, v2_procedure) for v1_function in definition_v1.snowpark.functions: v2_function = definition_v2.entities.get(v1_function.name) assert v2_function - assert v2_function.artifacts == definition_v1.snowpark.src + assert v2_function.artifacts == [Path(definition_v1.snowpark.src)] _assert_entities_are_equal(v1_function, v2_function) diff --git a/tests/snowpark/__snapshots__/test_function.ambr b/tests/snowpark/__snapshots__/test_function.ambr index 99b898c4a6..edde16c81f 100644 --- a/tests/snowpark/__snapshots__/test_function.ambr +++ b/tests/snowpark/__snapshots__/test_function.ambr @@ -1,7 +1,18 @@ # serializer version: 1 # name: test_deploy_function_fully_qualified_name[ok] ''' - Creating stage: dev_deployment + Performing initial validation + Checking remote state + Preparing required stages and artifacts + Creating (if not exists) stage: dev_deployment + Uploading app.zip to @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ + Creating Snowpark entities + Creating function custom_db.custom_schema.fqn_function + Creating function custom_schema.fqn_function_only_schema + Creating function custom_schema.schema_function + Creating function custom_db.PUBLIC.database_function + Creating function custom_db.custom_schema.database_function + Creating function custom_database.custom_schema.fqn_function3 +------------------------------------------------------------------------------+ | object | type | status | |---------------------------------------------------------+----------+---------| @@ -19,6 +30,8 @@ # --- # name: test_deploy_function_fully_qualified_name_duplicated_database[database error] ''' + Performing initial validation + Checking remote state +- Error ----------------------------------------------------------------------+ | Database provided but name | | 'custom_database.custom_schema.fqn_function_error' is fully qualified name. | @@ -28,6 +41,8 @@ # --- # name: test_deploy_function_fully_qualified_name_duplicated_schema[schema error] ''' + Performing initial validation + Checking remote state +- Error ----------------------------------------------------------------------+ | Schema provided but name 'custom_schema.fqn_function_error' is fully | | qualified name. 
  |
@@ -37,6 +52,8 @@
 # ---
 # name: test_deploy_function_secrets_without_external_access
   '''
+  Performing initial validation
+  Checking remote state
   +- Error ----------------------------------------------------------------------+
   | func1 defined with secrets but without external integration.                 |
   +------------------------------------------------------------------------------+
diff --git a/tests/snowpark/__snapshots__/test_procedure.ambr b/tests/snowpark/__snapshots__/test_procedure.ambr
index 0758725119..aab627199a 100644
--- a/tests/snowpark/__snapshots__/test_procedure.ambr
+++ b/tests/snowpark/__snapshots__/test_procedure.ambr
@@ -1,6 +1,8 @@
 # serializer version: 1
 # name: test_deploy_procedure_fails_if_integration_does_not_exists
   '''
+  Performing initial validation
+  Checking remote state
   +- Error ----------------------------------------------------------------------+
   | Following external access integration does not exists in Snowflake:          |
   | external_2                                                                    |
@@ -10,16 +12,20 @@
 # ---
 # name: test_deploy_procedure_fails_if_object_exists_and_no_replace
   '''
+  Performing initial validation
+  Checking remote state
   +- Error ----------------------------------------------------------------------+
   | Following objects already exists. Consider using --replace.                   |
-  | procedure: MockDatabase.MockSchema.procedureName(string)                      |
-  | procedure: MockDatabase.MockSchema.test()                                     |
+  | procedure: procedureName                                                      |
+  | procedure: test                                                               |
   +------------------------------------------------------------------------------+
   '''
 # ---
 # name: test_deploy_procedure_fully_qualified_name[database error]
   '''
+  Performing initial validation
+  Checking remote state
   +- Error ----------------------------------------------------------------------+
   | Database provided but name                                                    |
   | 'custom_database.custom_schema.fqn_procedure_error' is fully qualified name. |
@@ -29,6 +35,8 @@
 # ---
 # name: test_deploy_procedure_fully_qualified_name_duplicated_schema[schema error]
   '''
+  Performing initial validation
+  Checking remote state
   +- Error ----------------------------------------------------------------------+
   | Schema provided but name 'custom_schema.fqn_procedure_error' is fully         |
   | qualified name.                                                               |
@@ -38,6 +46,8 @@
 # ---
 # name: test_deploy_procedure_secrets_without_external_access
   '''
+  Performing initial validation
+  Checking remote state
   +- Error ----------------------------------------------------------------------+
   | procedureName defined with secrets but without external integration.         |
   +------------------------------------------------------------------------------+
diff --git a/tests/snowpark/test_common.py b/tests/snowpark/test_common.py
index 0fc6177557..f381ee7fe7 100644
--- a/tests/snowpark/test_common.py
+++ b/tests/snowpark/test_common.py
@@ -23,7 +23,6 @@
     _sql_to_python_return_type_mapper,
     check_if_replace_is_required,
 )
-from snowflake.cli.api.constants import ObjectType


 def test_get_snowflake_packages_delta():
@@ -86,7 +85,11 @@ def test_sql_to_python_return_type_mapper(argument: Tuple[str, str]):
         ({"imports": ["@FOO.BAR.BAZ/some_project/some_package.zip"]}, True),
         ({"imports": ["@FOO.BAR.BAZ/my_snowpark_project/app.zip"]}, False),
         (
-            {"stage_artifact_file": "@FOO.BAR.BAZ/my_snowpark_project/another_app.zip"},
+            {
+                "stage_artifact_files": [
+                    "@FOO.BAR.BAZ/my_snowpark_project/another_app.zip"
+                ]
+            },
             True,
         ),
         ({"runtime_ver": "3.9"}, True),
@@ -100,7 +103,7 @@ def test_check_if_replace_is_required(mock_procedure_description, arguments, exp
         "snowflake_dependencies": ["snowflake-snowpark-python", "pytest<9.0.0,>=7.0.0"],
         "external_access_integrations": [],
         "imports": [],
-        "stage_artifact_file": "@FOO.BAR.BAZ/my_snowpark_project/app.zip",
+        "stage_artifact_files": ["@FOO.BAR.BAZ/my_snowpark_project/app.zip"],
         "runtime_ver": "3.10",
         "execute_as_caller": True,
     }
@@ -108,7 +111,7 @@ def test_check_if_replace_is_required(mock_procedure_description, arguments, exp

     assert (
         check_if_replace_is_required(
-            ObjectType.PROCEDURE, mock_procedure_description, **replace_arguments
+            "procedure", mock_procedure_description, **replace_arguments
         )
         == expected
     )
diff --git a/tests/snowpark/test_function.py b/tests/snowpark/test_function.py
index 3008027267..1f3ce17de3 100644
--- a/tests/snowpark/test_function.py
+++ b/tests/snowpark/test_function.py
@@ -53,7 +53,7 @@ def test_deploy_function(
     assert result.exit_code == 0, result.output
     assert ctx.get_queries() == [
         "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'",
-        f"put file://{Path(project_dir).resolve()}/app.zip @MockDatabase.MockSchema.dev_deployment/my_snowpark_project"
+        f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/"
         f" auto_compress=false parallel=4 overwrite=True",
         dedent(
             """\
@@ -62,7 +62,7 @@ def test_deploy_function(
             returns string
             language python
             runtime_version=3.10
-            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.zip')
+            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py')
             handler='app.func1_handler'
             packages=()
             """
@@ -101,7 +101,7 @@ def test_deploy_function_with_external_access(
     assert result.exit_code == 0, result.output
     assert ctx.get_queries() == [
         "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'",
-        f"put file://{Path(project_dir).resolve()}/app.zip @MockDatabase.MockSchema.dev_deployment/my_snowpark_project"
+        f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/"
         f" auto_compress=false parallel=4 overwrite=True",
         dedent(
             """\
@@ -110,7 +110,7 @@ def test_deploy_function_with_external_access(
             returns string
             language python
             runtime_version=3.10
-            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.zip')
+            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py')
             handler='app.func1_handler'
             packages=()
             external_access_integrations=(external_1,external_2)
@@ -161,7 +161,7 @@ def test_deploy_function_no_changes(
         ("packages", '["foo==1.2.3", "bar>=3.0.0"]'),
         ("handler", "app.func1_handler"),
         ("returns", "string"),
-        ("imports", "dev_deployment/my_snowpark_project/app.zip"),
+        ("imports", "dev_deployment/my_snowpark_project/app.py"),
         ("runtime_version", "3.10"),
     ]

@@ -185,7 +185,7 @@ def test_deploy_function_no_changes(
     ]
     assert queries == [
         "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'",
-        f"put file://{Path(project_dir).resolve()}/app.zip @MockDatabase.MockSchema.dev_deployment/my_snowpark_project auto_compress=false parallel=4 overwrite=True",
+        f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True",
     ]

@@ -223,7 +223,7 @@ def test_deploy_function_needs_update_because_packages_changes(
     ]
     assert queries == [
         "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'",
-        f"put file://{Path(project_dir).resolve()}/app.zip @MockDatabase.MockSchema.dev_deployment/my_snowpark_project auto_compress=false parallel=4 overwrite=True",
+        f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True",
         dedent(
             """\
             create or replace function IDENTIFIER('MockDatabase.MockSchema.func1')(a string default 'default value', b variant)
@@ -231,7 +231,7 @@ def test_deploy_function_needs_update_because_packages_changes(
             returns string
             language python
             runtime_version=3.10
-            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.zip')
+            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py')
             handler='app.func1_handler'
             packages=('foo==1.2.3','bar>=3.0.0')
             """
@@ -273,7 +273,7 @@ def test_deploy_function_needs_update_because_handler_changes(
     ]
     assert queries == [
         "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'",
-        f"put file://{Path(project_dir).resolve()}/app.zip @MockDatabase.MockSchema.dev_deployment/my_snowpark_project"
+        f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/"
         f" auto_compress=false parallel=4 overwrite=True",
         dedent(
             """\
@@ -282,7 +282,7 @@ def test_deploy_function_needs_update_because_handler_changes(
             returns string
             language python
             runtime_version=3.10
-            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.zip')
+            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py')
             handler='app.func1_handler'
             packages=('foo==1.2.3','bar>=3.0.0')
             """
diff --git a/tests/snowpark/test_procedure.py b/tests/snowpark/test_procedure.py
index 3d15e69e62..97f7a960d2 100644
--- a/tests/snowpark/test_procedure.py
+++ b/tests/snowpark/test_procedure.py
@@ -77,7 +77,7 @@ def test_deploy_procedure(
     )
     assert ctx.get_queries() == [
         "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'",
-        f"put file://{Path(tmp).resolve()}/app.zip @MockDatabase.MockSchema.dev_deployment/my_snowpark_project auto_compress=false parallel=4 overwrite=True",
+        f"put file://{Path(tmp).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/ auto_compress=false parallel=4 overwrite=True",
         dedent(
             """\
             create or replace procedure IDENTIFIER('MockDatabase.MockSchema.procedureName')(name string)
@@ -85,7 +85,7 @@ def test_deploy_procedure(
             returns string
             language python
             runtime_version=3.10
-            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.zip')
+            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py')
             handler='hello'
             packages=()
             """
@@ -97,7 +97,7 @@ def test_deploy_procedure(
             returns string
             language python
             runtime_version=3.10
-            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.zip')
+            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py')
             handler='test'
             packages=()
             """
@@ -146,7 +146,7 @@ def test_deploy_procedure_with_external_access(
     )
     assert ctx.get_queries() == [
         "create stage if not exists IDENTIFIER('MockDatabase.MockSchema.dev_deployment') comment='deployments managed by Snowflake CLI'",
-        f"put file://{Path(project_dir).resolve()}/app.zip @MockDatabase.MockSchema.dev_deployment/my_snowpark_project"
+        f"put file://{Path(project_dir).resolve()}/app.py @MockDatabase.MockSchema.dev_deployment/my_snowpark_project/"
         f" auto_compress=false parallel=4 overwrite=True",
         dedent(
             """\
@@ -155,7 +155,7 @@ def test_deploy_procedure_with_external_access(
             returns string
             language python
             runtime_version=3.10
-            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.zip')
+            imports=('@MockDatabase.MockSchema.dev_deployment/my_snowpark_project/app.py')
             handler='app.hello'
             packages=()
             external_access_integrations=(external_1,external_2)
@@ -284,7 +284,7 @@ def test_deploy_procedure_replace_nothing_to_update(
                 ("packages", "[]"),
                 ("handler", "hello"),
                 ("returns", "string"),
-                ("imports", "dev_deployment/my_snowpark_project/app.zip"),
+                ("imports", "dev_deployment/my_snowpark_project/app.py"),
             ],
             columns=["key", "value"],
         ),
@@ -293,7 +293,7 @@ def test_deploy_procedure_replace_nothing_to_update(
                 ("packages", "[]"),
                 ("handler", "test"),
                 ("returns", "string"),
-                ("imports", "dev_deployment/my_snowpark_project/app.zip"),
+                ("imports", "dev_deployment/my_snowpark_project/app.py"),
                 ("runtime_version", "3.10"),
             ],
             columns=["key", "value"],
@@ -338,7 +338,7 @@ def test_deploy_procedure_replace_updates_single_object(
                 ("packages", "[]"),
                 ("handler", "hello"),
                 ("returns", "string"),
-                ("imports", "dev_deployment/my_snowpark_project/app.zip"),
+                ("imports", "dev_deployment/my_snowpark_project/app.py"),
             ],
             columns=["key", "value"],
         ),
@@ -391,7 +391,7 @@ def test_deploy_procedure_replace_creates_missing_object(
                 ("packages", "[]"),
                 ("handler", "hello"),
                 ("returns", "string"),
-                ("imports", "dev_deployment/my_snowpark_project/app.zip"),
+                ("imports", "dev_deployment/my_snowpark_project/app.py"),
             ],
             columns=["key", "value"],
         ),
diff --git a/tests/test_data/projects/snowpark_function_external_access/app.zip b/tests/test_data/projects/snowpark_function_external_access/app.zip
deleted file mode 100644
index d5b4f8c041..0000000000
Binary files a/tests/test_data/projects/snowpark_function_external_access/app.zip and /dev/null differ
diff --git a/tests/test_data/projects/snowpark_function_external_access/snowflake.yml b/tests/test_data/projects/snowpark_function_external_access/snowflake.yml
index f7ed98f957..fa6a9d1a4c 100644
--- a/tests/test_data/projects/snowpark_function_external_access/snowflake.yml
+++ b/tests/test_data/projects/snowpark_function_external_access/snowflake.yml
@@ -2,7 +2,7 @@ definition_version: 1
 snowpark:
   project_name: "my_snowpark_project"
   stage_name: "dev_deployment"
-  src: "app/"
+  src: "app.py"
   functions:
     - name: func1
       handler: "app.func1_handler"
diff --git a/tests/test_data/projects/snowpark_function_secrets_without_external_access/app.py b/tests/test_data/projects/snowpark_function_secrets_without_external_access/app.py
new file mode 100644
index 0000000000..0c336bf90f
--- /dev/null
+++ b/tests/test_data/projects/snowpark_function_secrets_without_external_access/app.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+import sys
+
+
+def hello(name: str) -> str:
+    return f"Hello {name}!"
+
+
+# For local debugging. Be aware you may need to type-convert arguments if
+# you add input parameters
+if __name__ == "__main__":
+    if len(sys.argv) > 1:
+        print(hello(sys.argv[1]))  # type: ignore
+    else:
+        print(hello("world"))
diff --git a/tests/test_data/projects/snowpark_function_secrets_without_external_access/app.zip b/tests/test_data/projects/snowpark_function_secrets_without_external_access/app.zip
deleted file mode 100644
index d5b4f8c041..0000000000
Binary files a/tests/test_data/projects/snowpark_function_secrets_without_external_access/app.zip and /dev/null differ
diff --git a/tests/test_data/projects/snowpark_function_secrets_without_external_access/snowflake.yml b/tests/test_data/projects/snowpark_function_secrets_without_external_access/snowflake.yml
index c49f8aba04..5d59ce8ae0 100644
--- a/tests/test_data/projects/snowpark_function_secrets_without_external_access/snowflake.yml
+++ b/tests/test_data/projects/snowpark_function_secrets_without_external_access/snowflake.yml
@@ -2,7 +2,7 @@ definition_version: 1
 snowpark:
   project_name: "my_snowpark_project"
   stage_name: "dev_deployment"
-  src: "app/"
+  src: "app.py"
   functions:
     - name: func1
       handler: "app.func1_handler"
diff --git a/tests/test_data/projects/snowpark_functions/app.zip b/tests/test_data/projects/snowpark_functions/app.zip
deleted file mode 100644
index d5b4f8c041..0000000000
Binary files a/tests/test_data/projects/snowpark_functions/app.zip and /dev/null differ
diff --git a/tests/test_data/projects/snowpark_functions/snowflake.yml b/tests/test_data/projects/snowpark_functions/snowflake.yml
index 53bfff2eb3..489562e0ed 100644
--- a/tests/test_data/projects/snowpark_functions/snowflake.yml
+++ b/tests/test_data/projects/snowpark_functions/snowflake.yml
@@ -2,7 +2,7 @@ definition_version: 1
 snowpark:
   project_name: "my_snowpark_project"
   stage_name: "dev_deployment"
-  src: "app/"
+  src: "app.py"
   functions:
     - name: func1
       handler: "app.func1_handler"
diff --git a/tests/test_data/projects/snowpark_procedure_external_access/app.zip b/tests/test_data/projects/snowpark_procedure_external_access/app.zip
deleted file mode 100644
index 2dc9885eb1..0000000000
Binary files a/tests/test_data/projects/snowpark_procedure_external_access/app.zip and /dev/null differ
diff --git a/tests/test_data/projects/snowpark_procedure_external_access/snowflake.yml b/tests/test_data/projects/snowpark_procedure_external_access/snowflake.yml
index 87b8a2588e..7e0f28bf0e 100644
--- a/tests/test_data/projects/snowpark_procedure_external_access/snowflake.yml
+++ b/tests/test_data/projects/snowpark_procedure_external_access/snowflake.yml
@@ -2,7 +2,7 @@ definition_version: 1
 snowpark:
   project_name: "my_snowpark_project"
   stage_name: "dev_deployment"
-  src: "app/"
+  src: "app.py"
   procedures:
     - name: procedureName
       handler: "app.hello"
diff --git a/tests/test_data/projects/snowpark_procedure_secrets_without_external_access/app.py b/tests/test_data/projects/snowpark_procedure_secrets_without_external_access/app.py
new file mode 100644
index 0000000000..cf8e7ed1ac
--- /dev/null
+++ b/tests/test_data/projects/snowpark_procedure_secrets_without_external_access/app.py
@@ -0,0 +1,5 @@
+from snowflake.snowpark import Session
+
+
+def hello(session: Session, name: str) -> str:
+    return f"Hello {name}"
diff --git a/tests/test_data/projects/snowpark_procedure_secrets_without_external_access/snowflake.yml b/tests/test_data/projects/snowpark_procedure_secrets_without_external_access/snowflake.yml
index 7b4b33fa2c..33ae59b626 100644
--- a/tests/test_data/projects/snowpark_procedure_secrets_without_external_access/snowflake.yml
+++ b/tests/test_data/projects/snowpark_procedure_secrets_without_external_access/snowflake.yml
@@ -2,7 +2,7 @@ definition_version: 1
 snowpark:
   project_name: "my_snowpark_project"
   stage_name: "dev_deployment"
-  src: "app/"
+  src: "app.py"
   procedures:
     - name: procedureName
       handler: "app.hello"
diff --git a/tests/test_data/projects/snowpark_procedures/app.zip b/tests/test_data/projects/snowpark_procedures/app.zip
deleted file mode 100644
index 2dc9885eb1..0000000000
Binary files a/tests/test_data/projects/snowpark_procedures/app.zip and /dev/null differ
diff --git a/tests/test_data/projects/snowpark_procedures/snowflake.yml b/tests/test_data/projects/snowpark_procedures/snowflake.yml
index 7c69b6a774..7eb4dee794 100644
--- a/tests/test_data/projects/snowpark_procedures/snowflake.yml
+++ b/tests/test_data/projects/snowpark_procedures/snowflake.yml
@@ -2,7 +2,7 @@ definition_version: 1
 snowpark:
   project_name: "my_snowpark_project"
   stage_name: "dev_deployment"
-  src: "app/"
+  src: "app.py"
   procedures:
     - name: procedureName
       handler: "hello"
diff --git a/tests/test_data/projects/snowpark_procedures_coverage/app.py b/tests/test_data/projects/snowpark_procedures_coverage/app.py
deleted file mode 100644
index 602af440e0..0000000000
--- a/tests/test_data/projects/snowpark_procedures_coverage/app.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from __future__ import annotations
-
-import sys
-
-from snowflake.snowpark import Session
-
-
-def hello(session: Session, name: str) -> str:
-    return f"Hello {name}"
-
-
-def test(session: Session) -> str:
-    return "Test procedure"
-
-
-# For local debugging. Be aware you may need to type-convert arguments if
-# you add input parameters
-if __name__ == "__main__":
-    from snowflake.cli.api.config import cli_config
-
-    session = Session.builder.configs(cli_config.get_connection_dict("dev")).create()
-    if len(sys.argv) > 1:
-        print(hello(session, *sys.argv[1:]))  # type: ignore
-    else:
-        print(hello(session))  # type: ignore
-    session.close()
diff --git a/tests/test_data/projects/snowpark_procedures_coverage/app.zip b/tests/test_data/projects/snowpark_procedures_coverage/app.zip
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/tests/test_data/projects/snowpark_procedures_coverage/requirements.txt b/tests/test_data/projects/snowpark_procedures_coverage/requirements.txt
deleted file mode 100644
index e07b4c8561..0000000000
--- a/tests/test_data/projects/snowpark_procedures_coverage/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-snowflake-snowpark-python
-#snowcli # for local development
diff --git a/tests/test_data/projects/snowpark_procedures_coverage/snowflake.yml b/tests/test_data/projects/snowpark_procedures_coverage/snowflake.yml
deleted file mode 100644
index b4f0508070..0000000000
--- a/tests/test_data/projects/snowpark_procedures_coverage/snowflake.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-definition_version: 1
-snowpark:
-  project_name: "my_snowpark_project"
-  stage_name: "dev_deployment"
-  src: "app/"
-  procedures:
-    - name: foo
-      handler: "foo.func"
-      signature:
-        - name: "name"
-          type: "string"
-      returns: variant
diff --git a/tests_integration/test_data/projects/snowpark_v2/app_1/a.py b/tests_integration/test_data/projects/snowpark_v2/app_1/a.py
new file mode 100644
index 0000000000..9ee05d353d
--- /dev/null
+++ b/tests_integration/test_data/projects/snowpark_v2/app_1/a.py
@@ -0,0 +1,10 @@
+from __future__ import annotations
+from snowflake.snowpark import Session
+
+
+# test import
+import syrupy
+
+
+def hello_procedure(session: Session, name: str) -> str:
+    return f"Hello {name}"
diff --git a/tests_integration/test_data/projects/snowpark_v2/app_2/b.py b/tests_integration/test_data/projects/snowpark_v2/app_2/b.py
new file mode 100644
index 0000000000..bef124997f
--- /dev/null
+++ b/tests_integration/test_data/projects/snowpark_v2/app_2/b.py
@@ -0,0 +1,10 @@
+from __future__ import annotations
+from snowflake.snowpark import Session
+
+
+# test import
+import syrupy
+
+
+def test_procedure(session: Session) -> str:
+    return "Test procedure"
diff --git a/tests_integration/test_data/projects/snowpark_v2/c.py b/tests_integration/test_data/projects/snowpark_v2/c.py
new file mode 100644
index 0000000000..3ab4a6d6cc
--- /dev/null
+++ b/tests_integration/test_data/projects/snowpark_v2/c.py
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+
+# test import
+import syrupy
+
+
+def hello_function(name: str) -> str:
+    return f"Hello {name}!"
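Editor's note: the three fixture modules above are wired into the v2 `snowflake.yml` that follows. One layout assumption worth making explicit is that `handler: "a.hello_procedure"` can only resolve if `a.py` ends up at the root of the built `app_1.zip`, i.e. the build appears to zip a directory artifact's contents rather than the directory itself. The following is a minimal, self-contained sketch of that assumption using only the standard library; the file and archive names are hypothetical stand-ins and the snippet is not part of the diff.

import sys
import zipfile
from pathlib import Path

# Hypothetical stand-in for app_1/a.py above (the syrupy test import is omitted
# so the sketch has no third-party dependency).
Path("app_1").mkdir(exist_ok=True)
Path("app_1/a.py").write_text(
    'def hello_procedure(session, name: str) -> str:\n'
    '    return f"Hello {name}"\n'
)

# Assumed layout: the directory's *contents* sit at the archive root, so the
# handler module is "a", not "app_1.a".
with zipfile.ZipFile("app_1.zip", "w") as zf:
    zf.write("app_1/a.py", arcname="a.py")

# Zip archives on sys.path are importable through the stdlib zipimport hook,
# which mirrors how a staged imports=('.../app_1.zip') is resolved.
sys.path.insert(0, "app_1.zip")
import a  # noqa: E402

print(a.hello_procedure(None, "world"))  # -> Hello world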
diff --git a/tests_integration/test_data/projects/snowpark_v2/requirements.txt b/tests_integration/test_data/projects/snowpark_v2/requirements.txt
new file mode 100644
index 0000000000..18af07a40d
--- /dev/null
+++ b/tests_integration/test_data/projects/snowpark_v2/requirements.txt
@@ -0,0 +1 @@
+snowflake-snowpark-python syrupy
\ No newline at end of file
diff --git a/tests_integration/test_data/projects/snowpark_v2/snowflake.yml b/tests_integration/test_data/projects/snowpark_v2/snowflake.yml
new file mode 100644
index 0000000000..b9d89fe01e
--- /dev/null
+++ b/tests_integration/test_data/projects/snowpark_v2/snowflake.yml
@@ -0,0 +1,36 @@
+definition_version: 2
+
+defaults:
+  stage: "dev_deployment"
+
+entities:
+  hello_procedure:
+    type: "procedure"
+    stage: "stage_a"
+    identifier:
+      name: "hello_procedure"
+    handler: "a.hello_procedure"
+    signature:
+      - name: "name"
+        type: "string"
+    returns: string
+    artifacts:
+      - "app_1/"
+
+  test:
+    type: "procedure"
+    handler: "b.test_procedure"
+    signature: ""
+    returns: string
+    artifacts:
+      - "app_2/"
+
+  hello_function:
+    type: "function"
+    handler: "c.hello_function"
+    signature:
+      - name: "name"
+        type: "string"
+    returns: string
+    artifacts:
+      - "c.py"
diff --git a/tests_integration/test_snowpark.py b/tests_integration/test_snowpark.py
index 8b7082012f..fa1ad17e10 100644
--- a/tests_integration/test_snowpark.py
+++ b/tests_integration/test_snowpark.py
@@ -40,7 +40,7 @@ def test_snowpark_flow(
 ):
     database = test_database.upper()
     with project_directory("snowpark") as tmp_dir:
-        _test_steps.snowpark_build_should_zip_files()
+        _test_steps.snowpark_build_should_zip_files(additional_files=[Path("app.zip")])

         _test_steps.snowpark_deploy_should_finish_successfully_and_return(
             [
@@ -71,6 +71,7 @@
         expected_files = [
             f"{STAGE_NAME}/my_snowpark_project/app.zip",
+            f"{STAGE_NAME}/my_snowpark_project/dependencies.zip",
         ]
         _test_steps.assert_that_only_these_files_are_staged_in_test_db(
             *expected_files, stage_name=STAGE_NAME
         )
@@ -347,7 +348,9 @@ def test_snowpark_with_separately_created_package(

     with project_directory("snowpark_with_import") as p_dir:
-        _test_steps.snowpark_build_should_zip_files(additional_files=[Path("app.zip")])
+        _test_steps.snowpark_build_should_zip_files(
+            additional_files=[Path("app.zip")], no_dependencies=True
+        )

         _test_steps.snowpark_deploy_should_finish_successfully_and_return(
             [
@@ -373,7 +376,9 @@ def test_snowpark_with_single_dependency_having_no_other_deps(
         result = runner.invoke_json(["snowpark", "build"])
         assert result.exit_code == 0

-        assert "dummy_pkg_for_tests/shrubbery.py" in ZipFile("app.zip").namelist()
+        assert (
+            "dummy_pkg_for_tests/shrubbery.py" in ZipFile("dependencies.zip").namelist()
+        )

         _test_steps.snowpark_deploy_should_finish_successfully_and_return(
             [
@@ -400,7 +405,7 @@ def test_snowpark_with_single_requirement_having_transient_deps(
         result = runner.invoke_json(["snowpark", "build"])
         assert result.exit_code == 0

-        files = ZipFile("app.zip").namelist()
+        files = ZipFile("dependencies.zip").namelist()
         assert "dummy_pkg_for_tests_with_deps/shrubbery.py" in files
         assert "dummy_pkg_for_tests/shrubbery.py" in files  # as transient dep

@@ -433,7 +438,7 @@ def test_snowpark_commands_executed_outside_project_dir(
     result = runner.invoke_json(["snowpark", "build", "--project", project_subpath])
     assert result.exit_code == 0

-    files = ZipFile(Path(project_subpath) / "app.zip").namelist()
+    files = ZipFile(Path(project_subpath) / "dependencies.zip").namelist()
    assert "dummy_pkg_for_tests_with_deps/shrubbery.py" in files
    assert "dummy_pkg_for_tests/shrubbery.py" in files  # as transient dep

@@ -461,7 +466,9 @@ def test_snowpark_default_arguments(
 ):
     database = test_database.upper()
     with project_directory("snowpark_with_default_values") as tmp_dir:
-        _test_steps.snowpark_build_should_zip_files()
+        _test_steps.snowpark_build_should_zip_files(
+            additional_files=[Path("app.zip")], no_dependencies=True
+        )

         _test_steps.snowpark_deploy_should_finish_successfully_and_return(
             [
@@ -568,7 +575,7 @@ def test_snowpark_fully_qualified_name(
         ["sql", "-q", f"create schema {database}.{different_schema}"]
     )
     with project_directory("snowpark_fully_qualified_name") as tmp_dir:
-        _test_steps.snowpark_build_should_zip_files()
+        _test_steps.snowpark_build_should_zip_files(additional_files=[Path("app.zip")])

         # "default" database and schema provided by fully qualified name
         alter_snowflake_yml(
@@ -682,7 +689,7 @@ def test_snowpark_vector_function(
 ):
     database = test_database.upper()
     with project_directory("snowpark_vectorized") as tmp_dir:
-        _test_steps.snowpark_build_should_zip_files()
+        _test_steps.snowpark_build_should_zip_files(additional_files=[Path("app.zip")])

         _test_steps.snowpark_deploy_should_finish_successfully_and_return(
             [
@@ -726,7 +733,9 @@ def test_build_skip_version_check(
         ["snowpark", "build", "--skip-version-check"]
     )
     assert result.exit_code == 0, result.output
-    assert "Build done. Artifact path: " in result.output
+    assert "Build done." in result.output
+    assert "Creating dependencies.zip" not in result.output
+    assert "Creating: app.zip" in result.output


 @pytest.mark.integration
@@ -745,7 +754,9 @@ def test_build_with_anaconda_dependencies(
         alter_requirements_txt(tmp_dir / "requirements.txt", ["july", "snowflake.core"])
         result = runner.invoke_with_connection(["snowpark", "build", *flags])
         assert result.exit_code == 0, result.output
-        assert "Build done. Artifact path:" in result.output
+        assert "Build done." in result.output
+        assert "Creating dependencies.zip" in result.output
+        assert "Creating: app.zip" in result.output

         requirements_snowflake = tmp_dir / "requirements.snowflake.txt"
         if "--ignore-anaconda" in flags:
@@ -767,9 +778,11 @@ def test_build_with_non_anaconda_dependencies(
     )
     result = runner.invoke_with_connection(["snowpark", "build"])
     assert result.exit_code == 0, result.output
-    assert "Build done. Artifact path:" in result.output
+    assert "Build done." in result.output
+    assert "Creating dependencies.zip" in result.output
+    assert "Creating: app.zip" in result.output

-    files = ZipFile(tmp_dir / "app.zip").namelist()
+    files = ZipFile(tmp_dir / "dependencies.zip").namelist()
     assert "dummy_pkg_for_tests/shrubbery.py" in files
     assert "dummy_pkg_for_tests_with_deps/shrubbery.py" in files

@@ -820,11 +833,13 @@ def test_build_package_from_github(
     )
     result = runner.invoke_with_connection(["snowpark", "build"])
     assert result.exit_code == 0, result.output
-    assert "Build done. Artifact path:" in result.output
+    assert "Build done." in result.output
+    assert "Creating dependencies.zip" in result.output
+    assert "Creating: app.zip" in result.output

     assert (
         "dummy_pkg_for_tests/shrubbery.py"
-        in ZipFile(tmp_dir / "app.zip").namelist()
+        in ZipFile(tmp_dir / "dependencies.zip").namelist()
     )

@@ -907,6 +922,75 @@ def test_snowpark_aliases(project_directory, runner, _test_steps, test_database)
     ]


+@pytest.mark.integration
+def test_snowpark_flow_v2(
+    _test_steps, project_directory, alter_snowflake_yml, test_database
+):
+    database = test_database.upper()
+    with project_directory("snowpark_v2") as tmp_dir:
+        _test_steps.snowpark_build_should_zip_files(
+            additional_files=[Path("app_1.zip"), Path("app_2.zip")]
+        )
+        _test_steps.snowpark_deploy_should_finish_successfully_and_return(
+            [
+                {
+                    "object": f"{database}.PUBLIC.hello_procedure(name string)",
+                    "status": "created",
+                    "type": "procedure",
+                },
+                {
+                    "object": f"{database}.PUBLIC.test()",
+                    "status": "created",
+                    "type": "procedure",
+                },
+                {
+                    "object": f"{database}.PUBLIC.hello_function(name string)",
+                    "status": "created",
+                    "type": "function",
+                },
+            ]
+        )
+
+        _test_steps.assert_those_procedures_are_in_snowflake(
+            "hello_procedure(VARCHAR) RETURN VARCHAR"
+        )
+        _test_steps.assert_those_functions_are_in_snowflake(
+            "hello_function(VARCHAR) RETURN VARCHAR"
+        )
+
+        _test_steps.assert_that_only_these_files_are_staged_in_test_db(
+            "stage_a/my_project/app_1.zip",
+            "stage_a/my_project/dependencies.zip",
+            stage_name="stage_a",
+        )
+
+        _test_steps.assert_that_only_these_files_are_staged_in_test_db(
+            f"{STAGE_NAME}/my_project/app_2.zip",
+            f"{STAGE_NAME}/my_project/c.py",
+            f"{STAGE_NAME}/my_project/dependencies.zip",
+            stage_name=STAGE_NAME,
+        )
+
+        # Created objects can be executed
+        _test_steps.snowpark_execute_should_return_expected_value(
+            object_type="procedure",
+            identifier="hello_procedure('foo')",
+            expected_value="Hello foo",
+        )
+
+        _test_steps.snowpark_execute_should_return_expected_value(
+            object_type="procedure",
+            identifier="test()",
+            expected_value="Test procedure",
+        )
+
+        _test_steps.snowpark_execute_should_return_expected_value(
+            object_type="function",
+            identifier="hello_function('foo')",
+            expected_value="Hello foo!",
+        )
+
+
 @pytest.fixture
 def _test_setup(
     runner,
diff --git a/tests_integration/test_snowpark_external_access.py b/tests_integration/test_snowpark_external_access.py
index eff6d42ebb..efe367da2b 100644
--- a/tests_integration/test_snowpark_external_access.py
+++ b/tests_integration/test_snowpark_external_access.py
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from pathlib import Path

 import pytest

@@ -24,7 +25,7 @@
 def test_snowpark_external_access(project_directory, _test_steps, test_database):
     with project_directory("snowpark_external_access") as project_dir:
-        _test_steps.snowpark_build_should_zip_files()
+        _test_steps.snowpark_build_should_zip_files(additional_files=[Path("app.zip")])

         _test_steps.snowpark_deploy_should_finish_successfully_and_return(
             [
@@ -59,7 +60,7 @@ def test_snowpark_upgrades_with_external_access(
 ):
     with project_directory("snowpark") as tmp_dir:
-        _test_steps.snowpark_build_should_zip_files()
+        _test_steps.snowpark_build_should_zip_files(additional_files=[Path("app.zip")])

         _test_steps.snowpark_deploy_should_finish_successfully_and_return(
             [
diff --git a/tests_integration/testing_utils/snowpark_utils.py b/tests_integration/testing_utils/snowpark_utils.py
index 41b12800b0..0844900ce5 100644
--- a/tests_integration/testing_utils/snowpark_utils.py
+++ b/tests_integration/testing_utils/snowpark_utils.py
@@ -163,10 +163,15 @@ def object_describe_should_return_entity_description(
         )
         assert result.json is not None

-    def snowpark_build_should_zip_files(self, *args, additional_files=None) -> None:
+    def snowpark_build_should_zip_files(
+        self, *args, additional_files=None, no_dependencies=False
+    ) -> None:
         if not additional_files:
             additional_files = []

+        if not no_dependencies:
+            additional_files.append(Path("dependencies.zip"))
+
         current_files = set(Path(".").glob("**/*"))
         result = self._setup.runner.invoke_json(
             ["snowpark", "build", "--format", "JSON", *args]
@@ -175,11 +180,10 @@ def snowpark_build_should_zip_files(self, *args, additional_files=None) -> None:
         assert result.exit_code == 0, result.output
         assert result.json, result.output
         assert "message" in result.json
-        assert "Build done. Artifact path:" in result.json["message"]  # type: ignore
+        assert "Build done." in result.json["message"]  # type: ignore
         assert_that_current_working_directory_contains_only_following_files(
             *current_files,
-            Path("app.zip"),
             *additional_files,
             Path("requirements.snowflake.txt"),
             excluded_paths=[".packages"],
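Editor's note: since the helper no longer hard-codes `app.zip`, callers now state their expected build outputs explicitly. For reference, these are the three call patterns exercised in the updated tests above (the helper is an internal test utility, not a public API; comments summarize the intent each pattern appears to express):

# One directory artifact plus packaged requirements:
# expect both app.zip and dependencies.zip after the build.
_test_steps.snowpark_build_should_zip_files(additional_files=[Path("app.zip")])

# Build that should not produce a dependencies archive at all
# (e.g. the package was created separately or requirements need no packaging).
_test_steps.snowpark_build_should_zip_files(
    additional_files=[Path("app.zip")], no_dependencies=True
)

# v2 project with two directory artifacts; the single-file artifact c.py is
# uploaded as-is, so no extra archive for it appears in the working directory.
_test_steps.snowpark_build_should_zip_files(
    additional_files=[Path("app_1.zip"), Path("app_2.zip")]
)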