Commit

Merge branch 'main' into melnacouzi-fix-post-install-relative-path
sfc-gh-melnacouzi authored Jul 19, 2024
2 parents e3249ab + 59905dc commit 7b9cb57
Showing 61 changed files with 1,838 additions and 621 deletions.
2 changes: 1 addition & 1 deletion src/snowflake/cli/__about__.py
@@ -14,4 +14,4 @@

from __future__ import annotations

VERSION = "2.6.0.dev0"
VERSION = "2.6.1.dev0"
40 changes: 40 additions & 0 deletions src/snowflake/cli/api/commands/execution_metadata.py
@@ -0,0 +1,40 @@
# Copyright (c) 2024 Snowflake Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import uuid
from dataclasses import dataclass, field
from enum import Enum


class ExecutionStatus(Enum):
SUCCESS = "success"
FAILURE = "failure"


@dataclass
class ExecutionMetadata:
start_time: float = 0.0
end_time: float = 0.0
status: ExecutionStatus = ExecutionStatus.SUCCESS
execution_id: str = field(default_factory=lambda: uuid.uuid4().hex)

def __post_init__(self):
self.start_time = time.monotonic()

def complete(self, status: ExecutionStatus):
self.end_time = time.monotonic()
self.status = status

def get_duration(self):
return self.end_time - self.start_time
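
As an illustration only (not part of this commit), the new dataclass can be exercised on its own. The import path follows the file added above; timings come from time.monotonic(), so only the duration is meaningful:

import time

from snowflake.cli.api.commands.execution_metadata import (
    ExecutionMetadata,
    ExecutionStatus,
)

# Construction records a monotonic start time and a random hex execution id.
execution = ExecutionMetadata()

time.sleep(0.1)  # stand-in for real command work

# complete() records the end time and the final status.
execution.complete(ExecutionStatus.SUCCESS)

print(execution.execution_id)              # 32-character hex string
print(f"{execution.get_duration():.2f}s")  # roughly 0.10s
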
29 changes: 20 additions & 9 deletions src/snowflake/cli/api/commands/snow_typer.py
@@ -24,6 +24,10 @@
global_options,
global_options_with_connection,
)
from snowflake.cli.api.commands.execution_metadata import (
ExecutionMetadata,
ExecutionStatus,
)
from snowflake.cli.api.commands.flags import DEFAULT_CONTEXT_SETTINGS
from snowflake.cli.api.commands.typer_pre_execute import run_pre_execute_commands
from snowflake.cli.api.exceptions import CommandReturnTypeError
@@ -91,15 +95,18 @@ def custom_command(command_callable):
@wraps(command_callable)
def command_callable_decorator(*args, **kw):
"""Wrapper around command callable. This is what happens at "runtime"."""
self.pre_execute()
execution = ExecutionMetadata()
self.pre_execute(execution)
try:
result = command_callable(*args, **kw)
return self.process_result(result)
self.process_result(result)
execution.complete(ExecutionStatus.SUCCESS)
except Exception as err:
self.exception_handler(err)
execution.complete(ExecutionStatus.FAILURE)
self.exception_handler(err, execution)
raise
finally:
self.post_execute()
self.post_execute(execution)

return super(SnowTyper, self).command(name=name, **kwargs)(
command_callable_decorator
@@ -108,7 +115,7 @@ def command_callable_decorator(*args, **kw):
return custom_command

@staticmethod
def pre_execute():
def pre_execute(execution: ExecutionMetadata):
"""
Callback executed before running any command callable (after context execution).
Pay attention to make this method safe to use if performed operations are not necessary
@@ -118,7 +125,7 @@ def pre_execute():

log.debug("Executing command pre execution callback")
run_pre_execute_commands()
log_command_usage()
log_command_usage(execution)

@staticmethod
def process_result(result):
@@ -134,21 +141,25 @@ def process_result(result):
print_result(result)

@staticmethod
def exception_handler(exception: Exception):
def exception_handler(exception: Exception, execution: ExecutionMetadata):
"""
Callback executed on command execution error.
"""
from snowflake.cli.app.telemetry import log_command_execution_error

log.debug("Executing command exception callback")
log_command_execution_error(exception, execution)

@staticmethod
def post_execute():
def post_execute(execution: ExecutionMetadata):
"""
Callback executed after running any command callable. Pay attention to make this method safe to
use if performed operations are not necessary for executing the command in proper way.
"""
from snowflake.cli.app.telemetry import flush_telemetry
from snowflake.cli.app.telemetry import flush_telemetry, log_command_result

log.debug("Executing command post execution callback")
log_command_result(execution)
flush_telemetry()


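For reference, the control flow the decorator now implements can be sketched standalone; run_with_metadata below is a hypothetical helper, not part of SnowTyper:

from snowflake.cli.api.commands.execution_metadata import (
    ExecutionMetadata,
    ExecutionStatus,
)


def run_with_metadata(command_callable, *args, **kwargs):
    """Hypothetical mirror of command_callable_decorator's lifecycle."""
    execution = ExecutionMetadata()  # records start time and execution id
    try:
        result = command_callable(*args, **kwargs)
        execution.complete(ExecutionStatus.SUCCESS)
        return result
    except Exception:
        # Mark failure before reporting, so the error event can carry the
        # measured duration, then re-raise as the real decorator does.
        execution.complete(ExecutionStatus.FAILURE)
        raise
    finally:
        # In the real decorator this is post_execute(): result telemetry + flush.
        print(f"{execution.execution_id}: {execution.status.value} "
              f"in {execution.get_duration():.3f}s")
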
4 changes: 3 additions & 1 deletion src/snowflake/cli/app/loggers.py
@@ -183,7 +183,9 @@ def create_loggers(verbose: bool, debug: bool):
else:
# We need to remove handler definition - otherwise it creates file even if `save_logs` is False
del config.handlers["file"]
config.loggers["snowflake.cli"].handlers.remove("file")
for logger in config.loggers.values():
if "file" in logger.handlers:
logger.handlers.remove("file")

config.loggers["snowflake.cli"].level = global_log_level
config.loggers["snowflake"].level = global_log_level
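
The fix above matters when more than one configured logger references the "file" handler. A minimal dictConfig-style illustration, with plain dicts standing in for the CLI's actual config objects:

config = {
    "handlers": {"console": {}, "file": {}},
    "loggers": {
        "snowflake.cli": {"handlers": ["console", "file"]},
        "snowflake": {"handlers": ["console", "file"]},
    },
}

del config["handlers"]["file"]

# Previously only "snowflake.cli" dropped the reference; any other logger still
# pointing at the deleted handler would leave the config inconsistent.
for logger in config["loggers"].values():
    if "file" in logger["handlers"]:
        logger["handlers"].remove("file")
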
43 changes: 41 additions & 2 deletions src/snowflake/cli/app/telemetry.py
@@ -22,6 +22,7 @@
import click
from snowflake.cli.__about__ import VERSION
from snowflake.cli.api.cli_global_context import cli_context
from snowflake.cli.api.commands.execution_metadata import ExecutionMetadata
from snowflake.cli.api.config import get_feature_flags_section
from snowflake.cli.api.output.formats import OutputFormat
from snowflake.cli.api.utils.error_handling import ignore_exceptions
@@ -44,19 +45,25 @@ class CLITelemetryField(Enum):
COMMAND = "command"
COMMAND_GROUP = "command_group"
COMMAND_FLAGS = "command_flags"
COMMAND_EXECUTION_ID = "command_execution_id"
COMMAND_RESULT_STATUS = "command_result_status"
COMMAND_OUTPUT_TYPE = "command_output_type"
COMMAND_EXECUTION_TIME = "command_execution_time"
# Configuration
CONFIG_FEATURE_FLAGS = "config_feature_flags"
# Information
EVENT = "event"
ERROR_MSG = "error_msg"
ERROR_TYPE = "error_type"
IS_CLI_EXCEPTION = "is_cli_exception"
# Project context
PROJECT_DEFINITION_VERSION = "project_definition_version"


class TelemetryEvent(Enum):
CMD_EXECUTION = "executing_command"
CMD_EXECUTION_ERROR = "error_executing_command"
CMD_EXECUTION_RESULT = "result_executing_command"


TelemetryDict = Dict[Union[CLITelemetryField, TelemetryField], Any]
@@ -141,8 +148,40 @@ def flush(self):


@ignore_exceptions()
def log_command_usage():
_telemetry.send({TelemetryField.KEY_TYPE: TelemetryEvent.CMD_EXECUTION.value})
def log_command_usage(execution: ExecutionMetadata):
_telemetry.send(
{
TelemetryField.KEY_TYPE: TelemetryEvent.CMD_EXECUTION.value,
CLITelemetryField.COMMAND_EXECUTION_ID: execution.execution_id,
}
)


@ignore_exceptions()
def log_command_result(execution: ExecutionMetadata):
_telemetry.send(
{
TelemetryField.KEY_TYPE: TelemetryEvent.CMD_EXECUTION_RESULT.value,
CLITelemetryField.COMMAND_EXECUTION_ID: execution.execution_id,
CLITelemetryField.COMMAND_RESULT_STATUS: execution.status.value,
CLITelemetryField.COMMAND_EXECUTION_TIME: execution.get_duration(),
}
)


@ignore_exceptions()
def log_command_execution_error(exception: Exception, execution: ExecutionMetadata):
exception_type: str = type(exception).__name__
is_cli_exception: bool = issubclass(exception.__class__, click.ClickException)
_telemetry.send(
{
TelemetryField.KEY_TYPE: TelemetryEvent.CMD_EXECUTION_ERROR.value,
CLITelemetryField.COMMAND_EXECUTION_ID: execution.execution_id,
CLITelemetryField.ERROR_TYPE: exception_type,
CLITelemetryField.IS_CLI_EXCEPTION: is_cli_exception,
CLITelemetryField.COMMAND_EXECUTION_TIME: execution.get_duration(),
}
)


@ignore_exceptions()
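
Taken together, a single command run now emits up to three correlated messages sharing one command_execution_id. The payloads below are illustrative only: the "type" key for TelemetryField.KEY_TYPE is an assumption, and _telemetry.send() adds further common fields (CLI version, command name, flags, and so on):

execution_id = "9f1c2b0a4e5d4d9bb1d0c0ffee000001"  # made-up example id

usage_event = {
    "type": "executing_command",
    "command_execution_id": execution_id,
}

result_event = {
    "type": "result_executing_command",
    "command_execution_id": execution_id,
    "command_result_status": "success",  # or "failure"
    "command_execution_time": 0.42,      # seconds, monotonic delta
}

error_event = {  # only sent when the command raised
    "type": "error_executing_command",
    "command_execution_id": execution_id,
    "error_type": "ClickException",
    "is_cli_exception": True,
    "command_execution_time": 0.42,
}
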
40 changes: 40 additions & 0 deletions src/snowflake/cli/plugins/nativeapp/codegen/artifact_processor.py
@@ -15,6 +15,7 @@
from __future__ import annotations

from abc import ABC, abstractmethod
from pathlib import Path
from typing import Optional

from click import ClickException
@@ -34,6 +35,42 @@ def __init__(self, processor_name: str):
)


def is_python_file_artifact(src: Path, _: Path):
"""Determines whether the provided source path is an existing python file."""
return src.is_file() and src.suffix == ".py"


class ProjectFileContextManager:
"""
A context manager that encapsulates the logic required to update a project file
in processor logic. The processor can use this manager to gain access to the contents
of a file, and optionally provide replacement contents. If it does, the file is
correctly modified in the deploy root directory to reflect the new contents.
"""

def __init__(self, path: Path):
self.path = path
self._contents = None
self.edited_contents = None

@property
def contents(self):
return self._contents

def __enter__(self):
self._contents = self.path.read_text(encoding="utf-8")

return self

def __exit__(self, exc_type, exc_val, exc_tb):
if self.edited_contents is not None:
if self.path.is_symlink():
# if the file is a symlink, make sure we don't overwrite the original
self.path.unlink()

self.path.write_text(self.edited_contents, encoding="utf-8")


class ArtifactProcessor(ABC):
def __init__(
self,
Expand All @@ -49,3 +86,6 @@ def process(
**kwargs,
) -> None:
pass

def edit_file(self, path: Path):
return ProjectFileContextManager(path)
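
A hedged, standalone usage sketch of the new context manager, using a temporary file rather than a real deploy root (inside a processor the same object would be obtained via self.edit_file(path)):

from pathlib import Path
from tempfile import TemporaryDirectory

from snowflake.cli.plugins.nativeapp.codegen.artifact_processor import (
    ProjectFileContextManager,
    is_python_file_artifact,
)

with TemporaryDirectory() as tmp:
    source = Path(tmp) / "module.py"
    source.write_text("print('original')\n", encoding="utf-8")

    assert is_python_file_artifact(source, Path("unused"))

    # Read the file, decide on replacement contents, and let __exit__ persist them.
    with ProjectFileContextManager(source) as mgr:
        if "original" in mgr.contents:
            mgr.edited_contents = mgr.contents.replace("original", "edited")

    print(source.read_text(encoding="utf-8"))  # print('edited')
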
84 changes: 57 additions & 27 deletions src/snowflake/cli/plugins/nativeapp/codegen/compiler.py
@@ -24,12 +24,22 @@
ArtifactProcessor,
UnsupportedArtifactProcessorError,
)
from snowflake.cli.plugins.nativeapp.codegen.setup.native_app_setup_processor import (
NativeAppSetupProcessor,
)
from snowflake.cli.plugins.nativeapp.codegen.snowpark.python_processor import (
SnowparkAnnotationProcessor,
)
from snowflake.cli.plugins.nativeapp.feature_flags import FeatureFlag
from snowflake.cli.plugins.nativeapp.project_model import NativeAppProjectModel

SNOWPARK_PROCESSOR = "snowpark"
NA_SETUP_PROCESSOR = "native-app-setup"

_REGISTERED_PROCESSORS_BY_NAME = {
SNOWPARK_PROCESSOR: SnowparkAnnotationProcessor,
NA_SETUP_PROCESSOR: NativeAppSetupProcessor,
}


class NativeAppCompiler:
@@ -54,28 +64,31 @@ def compile_artifacts(self):
Go through every artifact object in the project definition of a native app, and execute processors in order of specification for each of the artifact object.
May have side-effects on the filesystem by either directly editing source files or the deploy root.
"""
should_proceed = False
for artifact in self._na_project.artifacts:
if artifact.processors:
should_proceed = True
break
if not should_proceed:

if not self._should_invoke_processors():
return

with cc.phase("Invoking artifact processors"):
if self._na_project.generated_root.exists():
raise ClickException(
f"Path {self._na_project.generated_root} already exists. Please choose a different name for your generated directory in the project definition file."
)

for artifact in self._na_project.artifacts:
for processor in artifact.processors:
artifact_processor = self._try_create_processor(
processor_mapping=processor,
)
if artifact_processor is None:
raise UnsupportedArtifactProcessorError(
processor_name=processor.name
)
else:
artifact_processor.process(
artifact_to_process=artifact, processor_mapping=processor
if self._is_enabled(processor):
artifact_processor = self._try_create_processor(
processor_mapping=processor,
)
if artifact_processor is None:
raise UnsupportedArtifactProcessorError(
processor_name=processor.name
)
else:
artifact_processor.process(
artifact_to_process=artifact,
processor_mapping=processor,
)

def _try_create_processor(
self,
@@ -86,15 +99,32 @@ def _try_create_processor(
Fetch processor object if one already exists in the cached_processors dictionary.
Else, initialize a new object to return, and add it to the cached_processors dictionary.
"""
if processor_mapping.name.lower() == SNOWPARK_PROCESSOR:
curr_processor = self.cached_processors.get(SNOWPARK_PROCESSOR, None)
if curr_processor is not None:
return curr_processor
else:
curr_processor = SnowparkAnnotationProcessor(
na_project=self._na_project,
)
self.cached_processors[SNOWPARK_PROCESSOR] = curr_processor
return curr_processor
else:
processor_name = processor_mapping.name.lower()
current_processor = self.cached_processors.get(processor_name)

if current_processor is not None:
return current_processor

processor_factory = _REGISTERED_PROCESSORS_BY_NAME.get(processor_name)
if processor_factory is None:
# No registered processor with the specified name
return None

current_processor = processor_factory(
na_project=self._na_project,
)
self.cached_processors[processor_name] = current_processor

return current_processor

def _should_invoke_processors(self):
for artifact in self._na_project.artifacts:
for processor in artifact.processors:
if self._is_enabled(processor):
return True
return False

def _is_enabled(self, processor: ProcessorMapping) -> bool:
if processor.name.lower() == NA_SETUP_PROCESSOR:
return FeatureFlag.ENABLE_NATIVE_APP_PYTHON_SETUP.is_enabled()
return True
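
To make the new lookup-and-cache pattern concrete, a minimal standalone sketch with generic names (not the compiler itself); in the real method a failed registry lookup returns None, and the caller raises UnsupportedArtifactProcessorError:

from typing import Callable, Dict, Optional


class FakeProcessor:
    """Stand-in for an ArtifactProcessor subclass."""

    def __init__(self, na_project=None):
        self.na_project = na_project


REGISTRY: Dict[str, Callable[..., FakeProcessor]] = {
    "snowpark": FakeProcessor,
    "native-app-setup": FakeProcessor,
}
cache: Dict[str, FakeProcessor] = {}


def try_create_processor(name: str) -> Optional[FakeProcessor]:
    """Return a cached instance, build one from the registry, or None."""
    key = name.lower()
    if key in cache:
        return cache[key]
    factory = REGISTRY.get(key)
    if factory is None:
        return None  # caller treats this as an unsupported processor
    cache[key] = factory(na_project=None)
    return cache[key]


assert try_create_processor("Snowpark") is try_create_processor("snowpark")
assert try_create_processor("unknown-processor") is None
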