Merge pull request #9 from SergeyIvanov87/fix_workflows
Fix workflows, remove unsupported Win workflow
SergeyIvanov87 authored Oct 26, 2023
2 parents 454018c + 1055d7e commit 4014e7c
Showing 30 changed files with 80 additions and 79 deletions.
19 changes: 5 additions & 14 deletions .github/workflows/cmake-multi-platform.yml
@@ -23,26 +23,17 @@ jobs:
#
# To add more build types (Release, Debug, RelWithDebInfo, etc.) customize the build_type list.
matrix:
-os: [ubuntu-latest, windows-latest]
+os: [ubuntu-latest]
build_type: [Release]
-c_compiler: [gcc, clang, cl]
+c_compiler: [gcc, clang]
include:
-- os: windows-latest
-c_compiler: cl
-cpp_compiler: cl
- os: ubuntu-latest
c_compiler: gcc
cpp_compiler: g++
- os: ubuntu-latest
c_compiler: clang
cpp_compiler: clang++
-exclude:
-- os: windows-latest
-c_compiler: gcc
-- os: windows-latest
-c_compiler: clang
-- os: ubuntu-latest
-c_compiler: cl
+# exclude:

steps:
- uses: actions/checkout@v3
@@ -74,12 +65,12 @@ jobs:
uses: everpcpc/elasticsearch-action@v2
with:
# The version of the Elasticsearch you want to run
-version: 7.10
+version: 7.10.0-amd64
# Plugins to install before start
#plugins:

- name: Test
working-directory: ${{ steps.strings.outputs.build-output-dir }}
# Execute tests defined by the CMake configuration. Note that --build-config is needed because the default Windows generator is a multi-config generator (Visual Studio generator).
# See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
-run: SET UT_HOST_ADDR=http://es1:9200 ctest --build-config ${{ matrix.build_type }}
+run: export UT_HOST_ADDR=http://es1:9200 && ctest --build-config ${{ matrix.build_type }}
4 changes: 3 additions & 1 deletion CMakeLists.txt
@@ -5,7 +5,9 @@ set (PROC_LIB_NAME "cpp_book_indexer")

project(${PROC_NAME} CXX C)

include(CheckCCompilerFlag)
+set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)

option(PICKY_COMPILER "Enable picky compiler" OFF)

set (BRANCH_ROOT ${CMAKE_CURRENT_SOURCE_DIR})
32 changes: 16 additions & 16 deletions bin/es.cpp
@@ -140,7 +140,7 @@ namespace documents
template<class T, class ...U>
std::vector<T> tmp_entry_extractor(std::vector<tmp_entry> &tmp_entries,
std::filesystem::path extract_prefix,
-int &error_number,
+size_t &error_number,
U &&...args);
}

@@ -234,7 +234,7 @@ int main(int argc, const char* argv[])
}

//to form csv
-for(const auto ret_data_it : ret_data)
+for(const auto &ret_data_it : ret_data)
{
std::cout << ret_data_it.first << ",";
std::cout << std::get<0>(ret_data_it.second) << ",";
@@ -312,8 +312,8 @@ int main(int argc, const char* argv[])
std::vector<documents::tmp_entry> restored_documents;

// request for documents
-int skipped_err_files = 0;
-for (int i = 0; i < requested_doc_num; i += group_size)
+size_t skipped_err_files = 0;
+for (i = 0; i < requested_doc_num; i += group_size)
{
size_t requested_document_count = std::min(requested_doc_num - i, group_size);
try
@@ -323,7 +323,7 @@ int main(int argc, const char* argv[])
catch (const std::exception &ex)
{
std::cerr << "Cannot get some or more documents:\n" << std::endl;
-for (int j = 0; j < requested_document_count; j ++)
+for (size_t j = 0; j < requested_document_count; j ++)
{
std::cerr << *(argv + doc_arg_index + i + j) << std::endl;
}
@@ -336,7 +336,7 @@ int main(int argc, const char* argv[])
// create temporary files
size_t prev_records = restored_documents.size();
restored_documents.reserve(prev_records + ret_data.size());
-for(const auto ret_data_it : ret_data)
+for(const auto &ret_data_it : ret_data)
{
try
{
@@ -417,9 +417,9 @@ int main(int argc, const char* argv[])
{
std::cout << "Available schemas:" << std::endl;
std::string list("\t");
-for (const char **s = schema_indices; *s; s++)
+for (const char **s_ptr = schema_indices; *s_ptr; s_ptr++)
{
-list = list + *s + ',';
+list = list + *s_ptr + ',';
}
if (!list.empty()) list.pop_back();
std::cout << list<< std ::endl;
@@ -429,21 +429,21 @@ int main(int argc, const char* argv[])
{
const char *found_schema = nullptr;
size_t argv_len = strlen(argv[1]);
-for (const char **s = schema_indices; *s; s++)
+for (const char **s_ptr = schema_indices; *s_ptr; s_ptr++)
{
-size_t s_len = strlen(*s);
-if (argv_len == s_len && !strncmp(argv[1], *s, s_len))
+size_t s_len = strlen(*s_ptr);
+if (argv_len == s_len && !strncmp(argv[1], *s_ptr, s_len))
{
-found_schema = *s;
+found_schema = *s_ptr;
break;
}
}
if (!found_schema)
{
std::cout << "Unexpected schema: " << argv[1] << ". Check on list of available schemas:" <<std::endl;
-for (const char **s = schema_indices; *s; s++)
+for (const char **s_ptr = schema_indices; *s_ptr; s_ptr++)
{
-std::cout << *s << std ::endl;
+std::cout << *s_ptr << std ::endl;
}
return -1;
}
@@ -518,7 +518,7 @@ tmp_entry::tmp_entry(std::string_view doc_name, const bin::v7::dispatcher::csv_d
size_t written_size = 0;
do
{
-int ret = write(fd, doc_data.data(), size_to_write - written_size);
+ssize_t ret = write(fd, doc_data.data(), size_to_write - written_size);
if (ret == -1)
{
if (errno != EINTR)
@@ -645,7 +645,7 @@ permanent_plain_entry::permanent_plain_entry(tmp_entry &&in, std::filesystem::pa
template <class T, class ... U>
std::vector<T> tmp_entry_extractor(std::vector<tmp_entry> &tmp_entries,
std::filesystem::path extract_prefix,
-int &error_number,
+size_t &error_number,
U &&...args)
{
std::vector<T> copied_files;
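
The es.cpp hunks above switch the write() return value to ssize_t and widen the error counters and loop indices to size_t, so comparisons against size_t quantities stay unsigned-to-unsigned. For reference, a minimal, self-contained sketch of the kind of EINTR-aware partial-write loop this code implements; the function name, the stdout target and the offset handling are illustrative, not copied from es.cpp:

#include <unistd.h>   // write
#include <cerrno>     // errno, EINTR
#include <cstddef>    // size_t

// Write the whole buffer, retrying on EINTR and on partial writes.
// Returns true on success, false on a genuine write error.
static bool write_all(int fd, const char *data, size_t size)
{
    size_t written = 0;
    while (written < size)
    {
        ssize_t ret = write(fd, data + written, size - written); // ssize_t: write() may return -1
        if (ret == -1)
        {
            if (errno == EINTR)
                continue;          // interrupted by a signal: just retry
            return false;          // real error
        }
        written += static_cast<size_t>(ret); // partial write: advance the offset
    }
    return true;
}

int main()
{
    const char msg[] = "hello\n";
    return write_all(1 /* stdout */, msg, sizeof(msg) - 1) ? 0 : 1;
}

The remaining hunks in es.cpp follow the same spirit: range-for bodies take the elements by const reference instead of copying them, and the shadowed loop variable s is renamed to s_ptr.
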
15 changes: 7 additions & 8 deletions bin/v7/dispatcher.cpp
@@ -73,7 +73,7 @@ get_match_elem<elasticsearch::image::search::tag::geo_bbox,
const char *pEnd = pStart;
while (*pStart && values.size() != float_num)
{
-const char *pEnd = elasticsearch::utils::get_next_char_if(pStart, [sep]( const char* sym) { return (*sym == sep);});
+pEnd = elasticsearch::utils::get_next_char_if(pStart, [sep]( const char* sym) { return (*sym == sep);});
try
{
values.push_back(stof(std::string(pStart, pEnd - pStart)));
@@ -109,7 +109,6 @@ template<class Model, class SearchRequest, class Tracer>
std::vector<record_t<Model>>
extract_model_records(const std::shared_ptr<SearchRequest> &search_ptr, Tracer tracer)
{
-using transaction = SearchRequest;
using data = Model;

if (!search_ptr)
@@ -465,7 +464,7 @@ void request_image_index_mapping_delete(const dispatcher &d, Tracer)
}

template<class Tracer>
-void request_rm_data(const dispatcher &d, std::ostream &out, const char *index, const char *doc_path_id, Tracer tracer)
+void request_rm_data(const dispatcher &d, std::ostream &out, const char *index, const char *doc_path_id, Tracer)
{
std::optional<elasticsearch::v7::delete_data::response> ans_ptr;
if (!strcmp(index, schema_indices[0]))
@@ -527,8 +526,8 @@ void request_put_data(const dispatcher &d, std::ostream &out,
bin::data_manipulation::inject_to_model<elasticsearch::book::model::data,
BOOK_DATA_MODEL_ELEMENTS>(*book_model_promise, override_model_params);
}
-catch (const std::exception& ex) {
-exception_logging << "Book schema parse failed, reason:\n" << ex.what() << std::endl;
+catch (const std::exception& exx) {
+exception_logging << "Book schema parse failed, reason:\n" << exx.what() << std::endl;
throw std::runtime_error(std::string("unsupported format by path: ") + file_path + ", error: " + exception_logging.str());
}
}
@@ -595,9 +594,9 @@ void request_put_data(const dispatcher &d, std::ostream &out,

template <class Tracer>
std::map<std::string, dispatcher::csv_data_t>
-request_get_data(const dispatcher &d, std::ostream &out,
+request_get_data(const dispatcher &d, std::ostream &,
const char *index, const char **document_names, size_t document_count,
-Tracer tracer)
+Tracer)
{
std::map<std::string, dispatcher::csv_data_t> ret;
if (!strcmp(index, schema_indices[0]))
@@ -637,7 +636,7 @@ std::map<std::string, dispatcher::csv_data_t>

template <class Tracer>
void request_update_data(dispatcher &d,
-std::ostream &out, const char *file_path, const char* document_id,
+std::ostream &, const char *file_path, const char* document_id,
const std::map<std::string, std::string>& override_model_params,
Tracer tracer)
{
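
The first hunk in bin/v7/dispatcher.cpp fixes a shadowed pEnd: the inner declaration meant the outer pointer was never updated, so pStart could stop advancing past consumed tokens. A standalone sketch of the corrected pattern with simplified names (the real code goes through elasticsearch::utils::get_next_char_if; parse_floats and max_values are invented for this example):

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Collect up to `max_values` floats from a `sep`-separated string.
static std::vector<float> parse_floats(const char *pStart, char sep, size_t max_values)
{
    std::vector<float> values;
    const char *pEnd = pStart;
    while (*pStart && values.size() != max_values)
    {
        // Reuse the outer pEnd instead of shadowing it with a new declaration,
        // so pStart below really moves past the token just consumed.
        pEnd = pStart;
        while (*pEnd && *pEnd != sep)
            ++pEnd;
        try
        {
            values.push_back(std::stof(std::string(pStart, pEnd - pStart)));
        }
        catch (const std::exception &) { /* skip tokens that are not numbers */ }
        pStart = *pEnd ? pEnd + 1 : pEnd;   // step over the separator, if any
    }
    return values;
}

int main()
{
    for (float v : parse_floats("12.5,3.75,0.25,9", ',', 4))
        std::cout << v << '\n';
    return 0;
}

The remaining dispatcher hunks drop names from parameters the bodies never read and rename the caught exception from ex to exx, presumably to avoid shadowing a variable in an enclosing scope.
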
1 change: 1 addition & 0 deletions cmake/FindCURL.cmake
@@ -47,3 +47,4 @@ if(WITH_CURL)
list (APPEND COMPILE_DEFS -DWITH_CURL)
endif()
endif()
message("CURL has been bootstrapped and the package resides in: ${CURL_INSTALLED_PATH}")
1 change: 1 addition & 0 deletions cmake/FindTXML.cmake
@@ -57,3 +57,4 @@ endif()
include(${TEMPLATE_XML_PATH}/lib/txml/cmake/FindLibXML2.cmake)
target_link_libraries(${TXML_TARGET} INTERFACE ${XML_TARGET})
add_dependencies(${TXML_TARGET} ${XML_TARGET})
message("TemplateXML has been bootstrapped and the package resides in: ${TEMPLATE_XML_PATH}")
1 change: 0 additions & 1 deletion elasticsearch/books/FB2/fb2_reader.hpp
@@ -8,7 +8,6 @@

namespace fb2
{
-class ShortFictionBook;
class packer;
class reader : public elasticsearch::book::packer_interface_aggregator
{
1 change: 1 addition & 0 deletions elasticsearch/books/FB2/fb2_unpacker.hpp
@@ -3,6 +3,7 @@

#include <memory>
#include <string>
+//#include <txml/txml.hpp>
#include "elasticsearch/common_model/BinaryBlob.h"
#include "elasticsearch/common_model/OriginalPath.h"

@@ -29,7 +29,7 @@ struct to_model_data : public txml::FormatSerializerBase<to_model_data, txml::St
val.make_format_serialize(*this, tracer);
}
template<class Tracer>
-void serialize_impl(const ::fb2::FB2TextElement &val, Tracer tracer)
+void serialize_impl(const ::fb2::FB2TextElement &, Tracer)
{
}

1 change: 1 addition & 0 deletions elasticsearch/books/book_reader.cpp
@@ -1,3 +1,4 @@
+#include <txml/txml.hpp>
#include "elasticsearch/books/book_reader.hpp"

#include <txml/applications/fb2/fb2_fwd.h>
@@ -29,7 +29,7 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
}

template<class SerializedItem, class Tracer>
-void serialize_schema_tag_impl(txml::LeafTag&& t, Tracer &tracer)
+void serialize_schema_tag_impl(txml::LeafTag&&, Tracer &)
{
//this->json_object_stack_helper->push(nlohmann::json::object({{SerializedItem::class_name(),{{"type", "text"}}}}));
this->json_object_stack_helper->push(nlohmann::json::object(
2 changes: 2 additions & 0 deletions elasticsearch/common_model/BinaryBlob.h
@@ -3,6 +3,8 @@

#include <vector>
#include <txml/txml_fwd.h>
+#include <txml/include/XMLNodeLeaf.hpp>
+
#include "elasticsearch/utils/base64.h"

namespace elasticsearch
1 change: 1 addition & 0 deletions elasticsearch/common_model/OriginalPath.h
@@ -2,6 +2,7 @@
#define COMMON_MODEL_ORIGINAL_PATH_H

#include <txml/txml_fwd.h>
+#include <txml/include/XMLNodeLeaf.hpp>

namespace elasticsearch
{
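
BinaryBlob.h and OriginalPath.h above now include txml/include/XMLNodeLeaf.hpp alongside txml_fwd.h. A forward declaration is only enough for pointers and references; once a type is used as a base class or stored by value, its full definition has to be visible. A compact illustration with placeholder types (these are simplified stand-ins, not the real txml or elasticsearch classes):

// Placeholder types, standing in for what a *_fwd.h vs. a full header provide.
class Leaf;                      // forward declaration: fine for Leaf* / Leaf&

struct Registry {
    Leaf *entry = nullptr;       // compiles with just the declaration above
};

class Leaf {                     // full definition, normally pulled in via #include
public:
    int value = 0;
};

struct Blob : Leaf {             // deriving from Leaf requires the full definition
    Leaf stored;                 // ...and so does holding one by value
};

int main()
{
    Blob b;
    b.stored.value = 42;
    return b.value;              // Blob's own Leaf subobject is default-initialized to 0
}

With the definition included directly, these headers no longer depend on every includer having pulled in the full txml headers first.
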
14 changes: 7 additions & 7 deletions elasticsearch/common_model/serializers/schema_serializer.hpp
@@ -18,19 +18,19 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
TXML_SCHEMA_SERIALIZER_DISPATCHABLE_OBJECT

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<BinaryBlob>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<BinaryBlob>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object({{BinaryBlob::class_name(),{{"type", "binary"}}}}));
}

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<CreationDateTime>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<CreationDateTime>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object({{CreationDateTime::class_name(),{{"type", "date"}}}}));
}

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<Tags>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<Tags>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object(
{
@@ -47,7 +47,7 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
}

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<Preview>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<Preview>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object({{Preview::class_name(),{{"type", "binary"}}}}));
}
@@ -63,7 +63,7 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
}

template<class SerializedItem, class Tracer>
-void serialize_schema_tag_impl(txml::LeafTag&& t, Tracer &tracer)
+void serialize_schema_tag_impl(txml::LeafTag&&, Tracer &)
{
//this->json_object_stack_helper->push(nlohmann::json::object({{SerializedItem::class_name(),{{"type", "text"}, decorator::make_keyword()}}}));
this->json_object_stack_helper->push(nlohmann::json::object(
@@ -81,7 +81,7 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
}

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<OriginalPath>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<OriginalPath>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object(
{
@@ -98,7 +98,7 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
}

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<SchemaVersion>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<SchemaVersion>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object({{SchemaVersion::class_name(),{{"type", "version"}}}}));
}
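
Every hunk in schema_serializer.hpp above makes the same change: parameters that the body never reads lose their names. Leaving a parameter unnamed (or, since C++17, tagging it [[maybe_unused]]) is the usual way to keep -Wall -Wextra builds free of -Wunused-parameter noise. A tiny sketch with made-up names, not the project's real serializer interface:

#include <iostream>

struct SchemaTag {};  // stand-in for the real tag type

// Both forms compile cleanly with -Wall -Wextra even though the arguments
// only drive overload resolution and are never read in the body.
void serialize_schema(SchemaTag, int /*tracer*/)                        // unnamed parameters
{
    std::cout << "binary\n";
}

void serialize_schema_verbose(SchemaTag, [[maybe_unused]] int tracer)   // C++17 attribute
{
    std::cout << "date\n";
}

int main()
{
    serialize_schema(SchemaTag{}, 0);
    serialize_schema_verbose(SchemaTag{}, 0);
    return 0;
}

The same cleanup appears in the fb2, image, and doc_id serializers elsewhere in this commit.
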
@@ -20,21 +20,21 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
TXML_SCHEMA_SERIALIZER_DISPATCHABLE_OBJECT

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<element::Location>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<element::Location>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object({{element::Location::class_name().data(),{{"type", "geo_point"}}}}));
}

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<element::OriginalTime>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<element::OriginalTime>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object(
{{element::OriginalTime::class_name().data(),
{{"type", "date"},{"format", "yyyy:MM:dd HH:mm:ss||yyyy-MM-dd||epoch_millis"}}}}));
}

template<class Tracer>
-void serialize_schema_impl(txml::details::SchemaTag<element::DigitizeTime>, Tracer tracer)
+void serialize_schema_impl(txml::details::SchemaTag<element::DigitizeTime>, Tracer)
{
this->json_object_stack_helper->push(nlohmann::json::object(
{{elasticsearch::image::model::element::DigitizeTime::class_name().data(),
@@ -52,7 +52,7 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
}

template<class SerializedItem, class Tracer>
-void serialize_schema_tag_impl(txml::LeafTag&& t, Tracer &tracer)
+void serialize_schema_tag_impl(txml::LeafTag&&, Tracer &)
{/*
this->json_object_stack_helper->push(nlohmann::json::object({
{SerializedItem::class_name().data(),{
2 changes: 1 addition & 1 deletion elasticsearch/service/doc_id/schema_serializer.hpp
@@ -28,7 +28,7 @@ TXML_PREPARE_SCHEMA_SERIALIZER_DISPATCHABLE_CLASS(to_schema, Parent, SchemaToJSO
}

template<class SerializedItem, class Tracer>
-void serialize_schema_tag_impl(txml::LeafTag&& t, Tracer &tracer)
+void serialize_schema_tag_impl(txml::LeafTag&&, Tracer &)
{
this->json_object_stack_helper->push(nlohmann::json::object(
{
4 changes: 2 additions & 2 deletions elasticsearch/utils/strings.cpp
@@ -5,8 +5,8 @@ namespace elasticsearch
namespace utils
{
range_descriptor::range_descriptor(value_t && v, bool enclosed) :
-value(std::move(v)),
-border_included(enclosed)
+border_included(enclosed),
+value(std::move(v))
{
}

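
The constructor change above reorders the member initializer list. In C++ members are always initialized in declaration order, not in the order they appear in the initializer list, so listing them out of order earns a -Wreorder warning and can hide real bugs when one member's initializer reads another. A minimal sketch, assuming border_included is declared before value in range_descriptor (the names mirror the diff; the class body is invented):

#include <string>
#include <utility>

// Invented stand-in for the real range_descriptor: the point is only the
// declaration order of the members vs. the order of the initializer list.
struct range_descriptor {
    using value_t = std::string;

    bool border_included;   // declared first  -> initialized first
    value_t value;          // declared second -> initialized second

    // Initializer list now matches declaration order, so -Wreorder stays quiet.
    range_descriptor(value_t &&v, bool enclosed)
        : border_included(enclosed),
          value(std::move(v))
    {}
};

int main()
{
    range_descriptor r("0..10", true);
    return r.border_included && !r.value.empty() ? 0 : 1;
}

Swapping the initializer list, as the diff does, is the warning-free fix; reordering the member declarations would work equally well.
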