Skip to content

Commit

Permalink
Merge branch 'master' into snyk-fix-e930fc3cebe1714f04d5c53d8ea3f372
Browse files Browse the repository at this point in the history
  • Loading branch information
sfc-gh-dszmolka authored Dec 11, 2023
2 parents 653ec4b + e25955c commit f6496ac
Show file tree
Hide file tree
Showing 11,389 changed files with 229,071 additions and 775,790 deletions.
The diff you're trying to view is too large. We only load the first 3000 changed files.
41 changes: 41 additions & 0 deletions .github/workflows/build-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -86,3 +86,44 @@ jobs:
# BUILD_TYPE: ${{ matrix.build_type }}
# CLOUD_PROVIDER: ${{ matrix.cloud }}
# run: /usr/local/bin/bash ./ci/build_mac.sh
# Coverage job: builds and tests on Linux with gcov instrumentation enabled
# (CLIENT_CODE_COVERAGE=1), then uploads the reports to Codecov.
# Runs only after the regular Linux and Windows build-test jobs succeed.
build-test-codecov:
  needs: [build-test-linux, build-test-win]
  name: Build-Test-CodeCov-Linux
  runs-on: ubuntu-latest
  strategy:
    matrix:
      build_type: [ 'Release' ]
  steps:
    - uses: actions/checkout@v3
    - name: Build
      shell: bash
      env:
        BUILD_TYPE: ${{ matrix.build_type }}
        CLIENT_CODE_COVERAGE: 1
      run: ci/build_linux.sh
    # NOTE(review): actions/setup-python@v1 is deprecated -- confirm whether a
    # newer major version can be adopted without breaking the 3.7 requirement.
    - uses: actions/setup-python@v1
      with:
        python-version: '3.7'
        architecture: 'x64'
    - name: Test on AWS
      shell: bash
      env:
        BUILD_TYPE: ${{ matrix.build_type }}
        CLIENT_CODE_COVERAGE: 1
        CLOUD_PROVIDER: AWS
        PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
      run: ci/test_linux.sh
    - name: Test on Azure
      shell: bash
      env:
        BUILD_TYPE: ${{ matrix.build_type }}
        CLIENT_CODE_COVERAGE: 1
        CLOUD_PROVIDER: AZURE
        PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
      run: ci/test_linux.sh
    - name: Upload coverage reports to Codecov
      uses: codecov/codecov-action@v3
      with:
        # Without a token the upload may fail due to GitHub rate limits:
        # https://github.com/codecov/codecov-action/issues/557
        token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
        fail_ci_if_error: true
5 changes: 5 additions & 0 deletions .github/workflows/snyk-issue.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,11 @@ on:
schedule:
- cron: '* */12 * * *'

permissions:
contents: read
issues: write
pull-requests: write

concurrency: snyk-issue

jobs:
Expand Down
8 changes: 7 additions & 1 deletion .github/workflows/snyk-pr.yml
Original file line number Diff line number Diff line change
@@ -1,8 +1,14 @@
name: snyk-pr
name: Snyk-PR
on:
pull_request:
branches:
- master

permissions:
contents: read
issues: write
pull-requests: write

jobs:
snyk:
runs-on: ubuntu-latest
Expand Down
6 changes: 4 additions & 2 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -69,8 +69,8 @@ endif ()
set(CMAKE_VERBOSE_MAKEFILE ON)
if (UNIX)
# Linux and OSX
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99 -std=gnu99 -g -fPIC -Werror ${MOCK_OBJECT_WRAPPER_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -std=gnu++11 -fPIC -Werror ${MOCK_OBJECT_WRAPPER_FLAGS}")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99 -std=gnu99 -g -fPIC -Werror -Wno-error=deprecated-declarations ${MOCK_OBJECT_WRAPPER_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -std=gnu++11 -fPIC -Werror -Wno-error=deprecated-declarations ${MOCK_OBJECT_WRAPPER_FLAGS}")
else()
# Windows
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /ZH:SHA_256")
Expand Down Expand Up @@ -218,6 +218,8 @@ set(SOURCE_FILES_CPP_WRAPPER
cpp/lib/ArrowChunkIterator.hpp
cpp/lib/DataConversion.cpp
cpp/lib/DataConversion.hpp
cpp/lib/QueryContextCache.cpp
cpp/lib/QueryContextCache.hpp
cpp/lib/result_set.cpp
cpp/lib/result_set_arrow.cpp
cpp/lib/result_set_json.cpp
Expand Down
4 changes: 3 additions & 1 deletion README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,12 @@ Snowflake Connector for C/C++
.. image:: https://github.com/snowflakedb/libsnowflakeclient/workflows/Build%20and%20Test/badge.svg?branch=master
:target: https://github.com/snowflakedb/libsnowflakeclient/actions?query=workflow%3A%22Build+and+Test%22+branch%3Amaster

.. image:: https://codecov.io/github/snowflakedb/libsnowflakeclient/coverage.svg?branch=master
:target: https://codecov.io/github/snowflakedb/libsnowflakeclient?branch=master

.. image:: http://img.shields.io/:license-Apache%202-brightgreen.svg
:target: http://www.apache.org/licenses/LICENSE-2.0.txt

*Under development. No functionality works. Suggestions are welcome at any time.*

Build and Tests
======================================================================
Expand Down
1,908 changes: 956 additions & 952 deletions cacert.pem

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions ci/_init.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@ export DRIVER_NAME=libsnowflakeclient

# Build images
BUILD_IMAGE_VERSION_X64=1
BUILD_IMAGE_VERSION_AARCH64=2
BUILD_IMAGE_VERSION_AARCH64=3

# Test Images
TEST_IMAGE_VERSION_X64=1
TEST_IMAGE_VERSION_AARCH64=2
TEST_IMAGE_VERSION_AARCH64=3

PLATFORM_ARCH=$(uname -p)
if [[ "$PLATFORM_ARCH" == "aarch64" ]]; then
Expand Down
6 changes: 6 additions & 0 deletions ci/build/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,12 @@ download_build_component aws "$SCRIPTS_DIR/build_awssdk.sh" "$target"
download_build_component azure "$SCRIPTS_DIR/build_azuresdk.sh" "$target"
download_build_component cmocka "$SCRIPTS_DIR/build_cmocka.sh" "$target"
download_build_component arrow "$SCRIPTS_DIR/build_arrow.sh" "$target"

# very tight diskspace limit on github runners, clear deps folder with all .o files
if [[ -n "$GITHUB_ACTIONS" ]]; then
rm -rf $SCRIPTS_DIR/../deps/*
fi

build_component libsnowflakeclient "$SCRIPTS_DIR/build_libsnowflakeclient.sh" "$target" "$@"


7 changes: 7 additions & 0 deletions ci/build_linux.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,12 @@ else
export GIT_URL=https://github.com/${GITHUB_REPOSITORY}.git
export GIT_BRANCH=origin/$(basename ${GITHUB_REF})
export GIT_COMMIT=${GITHUB_SHA}
# remove unnecessary files to save disk space on github
# based on the workaround provided here:
# https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
fi

BUILD_IMAGE_NAME="${BUILD_IMAGE_NAMES[$DRIVER_NAME-$DOCKER_MARK]}"
Expand All @@ -37,6 +43,7 @@ docker run \
-e GITHUB_SHA \
-e GITHUB_EVENT_NAME \
-e GITHUB_REF \
-e CLIENT_CODE_COVERAGE \
-w /mnt/host \
"${BUILD_IMAGE_NAME}" \
"/mnt/host/ci/build/build.sh"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ ENV DEBIAN_FRONTEND="noninteractive" TZ="Etc/UTC"
RUN apt-get update

# dev toolsets
RUN apt-get install -y git cmake g++ zip python3.8 libre2-dev unixodbc-dev unixodbc libcppunit-dev vim
RUN apt-get install -y git cmake g++ zip python3.8 python3.8-venv libre2-dev unixodbc-dev unixodbc libcppunit-dev vim jq

RUN apt-get install -y gosu \
&& ln -s /usr/sbin/gosu /usr/local/bin/gosu
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
FROM nexus.int.snowflakecomputing.com:8086/docker/client-libsnowflakeclient-ubuntu20-aarch64:2
FROM nexus.int.snowflakecomputing.com:8086/docker/client-libsnowflakeclient-ubuntu20-aarch64:3
16 changes: 15 additions & 1 deletion ci/test/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,11 @@ echo "CMAKE: $CMAKE, CTEST: $CTEST"
source $SCRIPTS_DIR/utils.sh

init_git_variables
set_parameters $cloud_provider
set_parameters $CLOUD_PROVIDER
source $SCRIPTS_DIR/env.sh

CLIENT_CODE_COVERAGE=${CLIENT_CODE_COVERAGE:-0}

echo "=== setting test schema"
if [[ -n "$JOB_NAME" ]]; then
export SNOWFLAKE_TEST_SCHEMA=JENKINS_${JOB_NAME//-/_}_${BUILD_NUMBER}
Expand Down Expand Up @@ -76,8 +78,20 @@ function drop_schema()
popd
}

# Generate gcov coverage data by running gen_gcov.sh from the scripts dir.
# Accepts the build type as the first argument (the call site passes
# "$BUILD_TYPE"); falls back to the BUILD_TYPE environment variable so the
# original zero-argument usage keeps working. Expansions are quoted to
# survive paths containing spaces.
function generate_gcov()
{
    local build_type="${1:-$BUILD_TYPE}"
    echo "=== running gcov"
    pushd "$SCRIPTS_DIR"
    bash gen_gcov.sh "$build_type"
    popd
}

trap drop_schema EXIT

init_python
create_schema
test_component libsnowflakeclient "$SCRIPTS_DIR/build_libsnowflakeclient.sh" "$BUILD_TYPE"

if [[ $CLIENT_CODE_COVERAGE -eq 1 ]]; then
generate_gcov "$BUILD_TYPE"
fi
3 changes: 2 additions & 1 deletion ci/test_linux.sh
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ docker run \
-v $(cd $THIS_DIR/.. && pwd):/mnt/host \
-v $WORKSPACE:/mnt/workspace \
-e LOCAL_USER_ID=$(id -u $USER) \
-e cloud_provider \
-e CLOUD_PROVIDER \
-e SNOWFLAKE_TEST_CA_BUNDLE_FILE \
-e GIT_COMMIT \
-e GIT_BRANCH \
Expand All @@ -43,6 +43,7 @@ docker run \
-e GITHUB_ACTIONS \
-e GITHUB_SHA \
-e RUNNER_TRACKING_ID \
-e CLIENT_CODE_COVERAGE \
-w /mnt/host \
"${TEST_IMAGE_NAME}" \
"/mnt/host/ci/test/test.sh"
6 changes: 4 additions & 2 deletions ci/test_win.bat
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
@echo off
setlocal
if not defined GITHUB_ACTIONS (
set "path=C:\Program Files\7-Zip;C:\Python37;C:\python37\scripts;%path%"
set "path=C:\Program Files\7-Zip;%path%"
)
set scriptdir=%~dp0
set curdir=%cd%
Expand Down Expand Up @@ -87,12 +87,14 @@ exit /b 0
:init_python
@echo off
echo === creating venv
python -m venv venv
py -3.7 -m venv venv
call venv\scripts\activate
python -m pip install -U pip > nul 2>&1
if %ERRORLEVEL% NEQ 0 goto :error
pip install snowflake-connector-python > nul 2>&1
if %ERRORLEVEL% NEQ 0 goto :error
python -m pip install -U awscli > nul 2>&1
if %ERRORLEVEL% NEQ 0 goto :error
exit /b 0

:create_schema
Expand Down
33 changes: 19 additions & 14 deletions cpp/FileMetadataInitializer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,12 @@

Snowflake::Client::FileMetadataInitializer::FileMetadataInitializer(
std::vector<FileMetadata> &smallFileMetadata,
std::vector<FileMetadata> &largeFileMetadata) :
std::vector<FileMetadata> &largeFileMetadata,
IStatementPutGet *stmtPutGet) :
m_smallFileMetadata(smallFileMetadata),
m_largeFileMetadata(largeFileMetadata),
m_autoCompress(true)
m_autoCompress(true),
m_stmtPutGet(stmtPutGet)
{
}

Expand All @@ -39,9 +41,9 @@ Snowflake::Client::FileMetadataInitializer::initUploadFileMetadata(const std::st
fileNameFull += fileName;

FileMetadata fileMetadata;
fileMetadata.srcFileName = fileNameFull;
fileMetadata.srcFileName = m_stmtPutGet->platformStringToUTF8(fileNameFull);
fileMetadata.srcFileSize = fileSize;
fileMetadata.destFileName = std::string(fileName);
fileMetadata.destFileName = m_stmtPutGet->platformStringToUTF8(std::string(fileName));
// process compression type
initCompressionMetadata(fileMetadata);

Expand All @@ -56,13 +58,16 @@ void Snowflake::Client::FileMetadataInitializer::populateSrcLocUploadMetadata(st
size_t putThreshold)
{
// looking for files on disk.
std::string srcLocationPlatform = m_stmtPutGet->UTF8ToPlatformString(sourceLocation);

#ifdef _WIN32
WIN32_FIND_DATA fdd;
HANDLE hFind = FindFirstFile(sourceLocation.c_str(), &fdd);
HANDLE hFind = FindFirstFile(srcLocationPlatform.c_str(), &fdd);
if (hFind == INVALID_HANDLE_VALUE)
{
DWORD dwError = GetLastError();
if (dwError == ERROR_NO_MORE_FILES || dwError == ERROR_FILE_NOT_FOUND)
if (dwError == ERROR_NO_MORE_FILES || dwError == ERROR_FILE_NOT_FOUND ||
dwError == ERROR_PATH_NOT_FOUND)
{
CXX_LOG_ERROR("No file matching pattern %s has been found. Error: %d",
sourceLocation.c_str(), dwError);
Expand All @@ -73,22 +78,22 @@ void Snowflake::Client::FileMetadataInitializer::populateSrcLocUploadMetadata(st
{
CXX_LOG_ERROR("Failed on FindFirstFile. Error: %d", dwError);
throw SnowflakeTransferException(TransferError::DIR_OPEN_ERROR,
sourceLocation.c_str(), dwError);
srcLocationPlatform.c_str(), dwError);
}
}

do {
if (!(fdd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) )
{
std::string fileFullPath = std::string(fdd.cFileName);
size_t dirSep = sourceLocation.find_last_of(PATH_SEP);
size_t dirSep = srcLocationPlatform.find_last_of(PATH_SEP);
if (dirSep == std::string::npos)
{
dirSep = sourceLocation.find_last_of(ALTER_PATH_SEP);
}
if (dirSep != std::string::npos)
{
std::string dirPath = sourceLocation.substr(0, dirSep + 1);
std::string dirPath = srcLocationPlatform.substr(0, dirSep + 1);
LARGE_INTEGER fileSize;
fileSize.LowPart = fdd.nFileSizeLow;
fileSize.HighPart = fdd.nFileSizeHigh;
Expand All @@ -102,14 +107,14 @@ void Snowflake::Client::FileMetadataInitializer::populateSrcLocUploadMetadata(st
{
CXX_LOG_ERROR("Failed on FindNextFile. Error: %d", dwError);
throw SnowflakeTransferException(TransferError::DIR_OPEN_ERROR,
sourceLocation.c_str(), dwError);
srcLocationPlatform.c_str(), dwError);
}
FindClose(hFind);

#else
unsigned long dirSep = sourceLocation.find_last_of(PATH_SEP);
std::string dirPath = sourceLocation.substr(0, dirSep + 1);
std::string filePattern = sourceLocation.substr(dirSep + 1);
unsigned long dirSep = srcLocationPlatform.find_last_of(PATH_SEP);
std::string dirPath = srcLocationPlatform.substr(0, dirSep + 1);
std::string filePattern = srcLocationPlatform.substr(dirSep + 1);

DIR * dir = nullptr;
struct dirent * dir_entry;
Expand All @@ -133,7 +138,7 @@ void Snowflake::Client::FileMetadataInitializer::populateSrcLocUploadMetadata(st
{
CXX_LOG_ERROR("Cannot read path struct");
throw SnowflakeTransferException(TransferError::DIR_OPEN_ERROR,
sourceLocation.c_str(), ret);
srcLocationPlatform.c_str(), ret);
}
}
}
Expand Down
7 changes: 6 additions & 1 deletion cpp/FileMetadataInitializer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
#include <vector>
#include "FileMetadata.hpp"
#include "IStorageClient.hpp"
#include "snowflake/IStatementPutGet.hpp"

// used to decide whether to upload in sequence or in parallel
#define DEFAULT_UPLOAD_DATA_SIZE_THRESHOLD 209715200 //200Mb
Expand All @@ -25,7 +26,8 @@ class FileMetadataInitializer
{
public:
FileMetadataInitializer(std::vector<FileMetadata> &smallFileMetadata,
std::vector<FileMetadata> &largeFileMetadata);
std::vector<FileMetadata> &largeFileMetadata,
IStatementPutGet *stmtPutGet);

/**
* Given a source locations, find all files that match the location pattern,
Expand Down Expand Up @@ -101,6 +103,9 @@ class FileMetadataInitializer

/// Random device for crytpo random num generator.
Crypto::CryptoRandomDevice m_randDevice;

// statement which provides encoding conversion functionality
IStatementPutGet *m_stmtPutGet;
};
}
}
Expand Down
Loading

0 comments on commit f6496ac

Please sign in to comment.