Revert to C++17 build
willdealtry committed May 4, 2024
1 parent dfcba71 commit 8365209
Showing 30 changed files with 181 additions and 151 deletions.
2 changes: 1 addition & 1 deletion cpp/CMakeLists.txt
@@ -3,7 +3,7 @@ cmake_minimum_required(VERSION 3.21) # TARGET_RUNTIME_DLLS
# Make the `project` command handle the version of the project.
cmake_policy(SET CMP0048 NEW)

set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
# We do not need any compilers' extensions, so we disable them.
set(CMAKE_CXX_EXTENSIONS OFF)
5 changes: 2 additions & 3 deletions cpp/arcticdb/CMakeLists.txt
@@ -347,7 +347,6 @@ set(arcticdb_srcs
util/memory_mapped_file.hpp
util/name_validation.hpp
util/offset_string.hpp
util/offset_string.hpp
util/optional_defaults.hpp
util/pb_util.hpp
util/preconditions.hpp
@@ -473,7 +472,6 @@ set(arcticdb_srcs
util/memory_mapped_file.hpp
util/name_validation.cpp
util/offset_string.cpp
util/offset_string.cpp
util/sparse_utils.cpp
util/string_utils.cpp
util/trace.cpp
@@ -488,7 +486,7 @@ set(arcticdb_srcs
version/symbol_list.cpp
version/version_map_batch_methods.cpp
storage/s3/ec2_utils.cpp
)
storage/lmdb/lmdb.hpp util/cxx17_concepts.hpp)

if(${ARCTICDB_INCLUDE_ROCKSDB})
list (APPEND arcticdb_srcs
@@ -680,6 +678,7 @@ target_include_directories(arcticdb_python PRIVATE
${LIBMONGOCXX_STATIC_INCLUDE_DIRS}
${LIBBSONCXX_STATIC_INCLUDE_DIRS}
${BITMAGIC_INCLUDE_DIRS}
${LMDB_LIBRARIES}
)


36 changes: 20 additions & 16 deletions cpp/arcticdb/column_store/column.hpp
@@ -28,10 +28,11 @@
#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>

#include <concepts>
#include <numeric>
#include <optional>

#include <arcticdb/util/cxx17_concepts.hpp>

namespace py = pybind11;

namespace arcticdb {
@@ -677,7 +678,7 @@ class Column {
template <
typename input_tdt,
typename functor>
requires std::is_invocable_r_v<void, functor, typename input_tdt::DataTypeTag::raw_type>
// requires std::is_invocable_r_v<void, functor, typename input_tdt::DataTypeTag::raw_type> //TODO reinstate with C++20
static void for_each(const Column& input_column, functor&& f) {
auto input_data = input_column.data();
std::for_each(input_data.cbegin<input_tdt>(), input_data.cend<input_tdt>(), std::forward<functor>(f));
@@ -686,7 +687,7 @@ class Column {
template <
typename input_tdt,
typename functor>
requires std::is_invocable_r_v<void, functor, typename ColumnData::Enumeration<typename input_tdt::DataTypeTag::raw_type>>
//requires std::is_invocable_r_v<void, functor, typename ColumnData::Enumeration<typename input_tdt::DataTypeTag::raw_type>>
static void for_each_enumerated(const Column& input_column, functor&& f) {
auto input_data = input_column.data();
if (input_column.is_sparse()) {
@@ -699,10 +700,13 @@
}

template <
typename input_tdt,
typename output_tdt,
typename functor>
requires std::is_invocable_r_v<typename output_tdt::DataTypeTag::raw_type, functor, typename input_tdt::DataTypeTag::raw_type>
typename input_tdt,
typename output_tdt,
typename functor,
typename = std::enable_if<
std::is_invocable_r_v<
typename output_tdt::DataTypeTag::raw_type, functor,
typename input_tdt::DataTypeTag::raw_type>>>
static void transform(const Column& input_column, Column& output_column, functor&& f) {
auto input_data = input_column.data();
initialise_output_column(input_column, output_column);
@@ -719,12 +723,12 @@
typename left_input_tdt,
typename right_input_tdt,
typename output_tdt,
typename functor>
requires std::is_invocable_r_v<
typename output_tdt::DataTypeTag::raw_type,
functor,
typename left_input_tdt::DataTypeTag::raw_type,
typename right_input_tdt::DataTypeTag::raw_type>
typename functor,
typename = std::enable_if<std::is_invocable_r_v<
typename output_tdt::DataTypeTag::raw_type,
functor,
typename left_input_tdt::DataTypeTag::raw_type,
typename right_input_tdt::DataTypeTag::raw_type>>>
static void transform(const Column& left_input_column,
const Column& right_input_column,
Column& output_column,
@@ -783,8 +787,8 @@ class Column {

template <
typename input_tdt,
std::predicate<typename input_tdt::DataTypeTag::raw_type> functor>
static void transform(const Column& input_column,
typename functor>
static void transform_to_bitset(const Column& input_column,
util::BitSet& output_bitset,
bool sparse_missing_value_output,
functor&& f) {
@@ -806,7 +810,7 @@
template <
typename left_input_tdt,
typename right_input_tdt,
std::relation<typename left_input_tdt::DataTypeTag::raw_type, typename right_input_tdt::DataTypeTag::raw_type> functor>
typename functor>
static void transform(const Column& left_input_column,
const Column& right_input_column,
util::BitSet& output_bitset,
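The hunks above swap C++20 `requires` clauses for commented-out constraints or defaulted `std::enable_if` template parameters. For reference, a minimal self-contained sketch of the C++17 equivalent of one such constraint, using the same `std::is_invocable_r_v` trait (the function and parameter names are illustrative, not taken from `util/cxx17_concepts.hpp`); note that it is the `std::enable_if_t` (`::type`) form that makes an unsatisfied constraint drop the overload:

```cpp
#include <type_traits>
#include <utility>

// C++20:  template <typename F> requires std::is_invocable_r_v<void, F, int>
// C++17:  emulate the constraint with SFINAE on a defaulted template parameter.
template <typename F,
          typename = std::enable_if_t<std::is_invocable_r_v<void, F, int>>>
void for_each_int(F&& f) {
    for (int i = 0; i < 3; ++i)
        f(i);  // invoke the functor for each value
}

int main() {
    int sum = 0;
    for_each_int([&](int v) { sum += v; });  // accepted: callable with int
    // for_each_int(42);                     // rejected at compile time
    return sum == 3 ? 0 : 1;
}
```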
4 changes: 2 additions & 2 deletions cpp/arcticdb/column_store/column_data.hpp
@@ -258,7 +258,7 @@ struct ColumnData {
return data_.ptr_ == other.data_.ptr_;
}

base_type::reference dereference() const {
typename base_type::reference dereference() const {
if constexpr (iterator_type == IteratorType::ENUMERATED) {
return data_;
} else {
@@ -270,7 +270,7 @@ struct ColumnData {
ColumnData* parent_{nullptr};
std::optional<TypedBlockData<TDT>> opt_block_{std::nullopt};
std::size_t remaining_values_in_block_{0};
base_type::value_type data_;
typename base_type::value_type data_;
};

ColumnData(
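The two added `typename` keywords reflect a language difference: C++20 (P0634) lets a dependent qualified name be assumed to be a type in contexts such as return types and member declarations, while C++17 still requires the keyword. A small illustrative sketch with made-up names:

```cpp
#include <vector>

// In C++17 the return type below must be introduced with `typename`
// because Container::value_type depends on the template parameter.
template <typename Container>
typename Container::value_type first_or_default(const Container& c) {
    return c.empty() ? typename Container::value_type{} : c.front();
}

int main() {
    std::vector<int> v{7, 8, 9};
    return first_or_default(v) == 7 ? 0 : 1;
}
```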
10 changes: 7 additions & 3 deletions cpp/arcticdb/entity/stream_descriptor.hpp
@@ -272,9 +272,13 @@ StreamDescriptor index_descriptor(const StreamId& stream_id, IndexType, const Ra
}

template <typename IndexType>
StreamDescriptor index_descriptor(StreamId stream_id, IndexType index_type,
std::initializer_list<FieldRef> fields) {
return index_descriptor(stream_id, index_type, folly::gen::from(fields) | folly::gen::as<std::vector>());
StreamDescriptor index_descriptor(StreamId stream_id, IndexType index_type, std::initializer_list<FieldRef> fields) {
std::vector<FieldRef> fields_vec;
fields_vec.reserve(fields.size());
for(const auto& field : fields)
fields_vec.push_back(field);

return index_descriptor(stream_id, index_type, fields_vec);
}

template <typename IndexType, typename RangeType>
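Here the folly::gen pipeline is replaced by a reserve-and-push_back copy into a std::vector. An equivalent, slightly terser C++17 form is the vector's iterator-range constructor; a hedged sketch with a stand-in type (FieldRef is reduced to a string, which is not the real type):

```cpp
#include <initializer_list>
#include <string>
#include <vector>

// Stand-in for FieldRef, only to keep the sketch self-contained.
using FieldRefStandIn = std::string;

std::vector<FieldRefStandIn> to_vector(std::initializer_list<FieldRefStandIn> fields) {
    // Same effect as the reserve-and-push_back loop in the diff above.
    return std::vector<FieldRefStandIn>(fields.begin(), fields.end());
}

int main() {
    auto v = to_vector({"time", "price", "volume"});
    return v.size() == 3 ? 0 : 1;
}
```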
35 changes: 10 additions & 25 deletions cpp/arcticdb/log/log.cpp
@@ -33,16 +33,12 @@ namespace arcticdb::log {

static const char* DefaultLogPattern = "%Y%m%d %H:%M:%S.%f %t %L %n | %v";


namespace {
std::shared_ptr<Loggers> loggers_instance_;
std::once_flag loggers_init_flag_;
}


struct Loggers::Impl
{
std::shared_ptr<Loggers> loggers_instance_;
std::once_flag loggers_init_flag_;
} // namespace

struct Loggers::Impl {
std::mutex config_mutex_;
std::unordered_map<std::string, spdlog::sink_ptr> sink_by_id_;
std::unique_ptr<spdlog::logger> unconfigured_ = std::make_unique<spdlog::logger>("arcticdb",
@@ -70,7 +66,6 @@ struct Loggers::Impl
spdlog::logger& logger_ref(std::unique_ptr<spdlog::logger>& src);
};


constexpr auto get_default_log_level() {
return spdlog::level::info;
}
@@ -128,15 +123,13 @@ namespace fs = std::filesystem;
using SinkConf = arcticdb::proto::logger::SinkConfig;

Loggers::Loggers()
: impl_(std::make_unique<Impl>())
{
: impl_(std::make_unique<Impl>()) {
impl_->unconfigured_->set_level(get_default_log_level());
}

Loggers::~Loggers() = default;

Loggers& Loggers::instance()
{
Loggers& Loggers::instance() {
std::call_once(loggers_init_flag_, &Loggers::init);
return *loggers_instance_;
}
@@ -204,7 +197,6 @@ void Loggers::flush_all() {
snapshot().flush();
}


void Loggers::destroy_instance() {
loggers_instance_.reset();
}
@@ -213,7 +205,6 @@ void Loggers::init() {
loggers_instance_ = std::make_shared<Loggers>();
}


namespace {
std::string make_parent_dir(const std::string &p_str, std::string_view def_p_str) {
fs::path p;
@@ -236,7 +227,6 @@ spdlog::logger& Loggers::Impl::logger_ref(std::unique_ptr<spdlog::logger>& src)
return *unconfigured_;
}


bool Loggers::configure(const arcticdb::proto::logger::LoggersConfig &conf, bool force) {
auto lock = std::scoped_lock(impl_->config_mutex_);
if (!force && impl_->root_)
@@ -320,7 +310,6 @@ bool Loggers::configure(const arcticdb::proto::logger::LoggersConfig &conf, bool
check_and_configure("symbol", "root", impl_->symbol_);
check_and_configure("snapshot", "root", impl_->snapshot_);


if (auto flush_sec = util::as_opt(conf.flush_interval_seconds()).value_or(1); flush_sec != 0) {
impl_->periodic_worker_.emplace(
[loggers = std::weak_ptr(loggers_instance_)]() {
@@ -332,7 +321,6 @@ bool Loggers::configure(const arcticdb::proto::logger::LoggersConfig &conf, bool
return true;
}


void Loggers::Impl::configure_logger(
const arcticdb::proto::logger::LoggerConfig &conf,
const std::string &name,
@@ -354,18 +342,15 @@ void Loggers::Impl::configure_logger(
logger = std::make_unique<spdlog::logger>(fq_name, sink_ptrs.begin(), sink_ptrs.end());
}

if (!conf.pattern().empty()) {
if (!conf.pattern().empty())
logger->set_pattern(conf.pattern());
}
else {
else
logger->set_pattern(DefaultLogPattern);
}

if (conf.level() != 0) {
if (conf.level() != 0)
logger->set_level(static_cast<spdlog::level::level_enum>(conf.level() - 1));
} else {
else
logger->set_level(get_default_log_level());
}
}

}
2 changes: 1 addition & 1 deletion cpp/arcticdb/pipeline/index_writer.hpp
@@ -17,7 +17,7 @@

namespace arcticdb::pipelines::index {
// TODO: change the name - something like KeysSegmentWriter or KeyAggragator or better
template<ValidIndex Index>
template<typename Index>
class IndexWriter {
// All index segments are row-count indexed in the sense that the keys are
// already ordered - they don't need an additional index
12 changes: 0 additions & 12 deletions cpp/arcticdb/pipeline/input_tensor_frame.hpp
@@ -18,18 +18,6 @@ namespace arcticdb::pipelines {

using namespace arcticdb::entity;

/// @TODO Move to a separate "util" header
template <typename T, typename... U>
concept is_any_of = (std::same_as<T, U> || ...);

template <typename IndexT>
concept ValidIndex = is_any_of<
std::remove_cvref_t<std::remove_pointer_t<std::decay_t<IndexT>>>,
stream::TimeseriesIndex,
stream::RowCountIndex,
stream::TableIndex,
stream::EmptyIndex>;


struct InputTensorFrame {
InputTensorFrame() :
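The deleted `is_any_of`/`ValidIndex` concepts have a direct C++17 counterpart: a fold expression over `std::is_same_v`, checked via `static_assert` or `std::enable_if_t`. A sketch under assumed, placeholder index types (this is not necessarily what `util/cxx17_concepts.hpp` actually contains):

```cpp
#include <type_traits>

// Placeholder index types, standing in for stream::TimeseriesIndex etc.
struct TimeseriesIndexStandIn {};
struct RowCountIndexStandIn {};

// C++17 fold-expression equivalent of the removed is_any_of concept.
template <typename T, typename... U>
inline constexpr bool is_any_of_v = (std::is_same_v<T, U> || ...);

template <typename IndexT>
inline constexpr bool is_valid_index_v =
    is_any_of_v<std::remove_cv_t<std::remove_reference_t<IndexT>>,
                TimeseriesIndexStandIn, RowCountIndexStandIn>;

template <typename IndexT>
void check_index() {
    static_assert(is_valid_index_v<IndexT>, "unsupported index type");
}

int main() {
    check_index<TimeseriesIndexStandIn>();  // compiles
    // check_index<double>();               // would fail the static_assert
    return 0;
}
```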
7 changes: 7 additions & 0 deletions cpp/arcticdb/processing/clause.hpp
@@ -351,6 +351,13 @@ inline StreamDescriptor empty_descriptor(arcticdb::proto::descriptors::IndexDesc
}

struct NamedAggregator {
NamedAggregator(const std::string& s, const std::string& t, const std::string& v) :
aggregation_operator_(s),
input_column_name_(t),
output_column_name_(v){

}

std::string aggregation_operator_;
std::string input_column_name_;
std::string output_column_name_;
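The added constructor is the usual C++17 workaround for forwarding into an aggregate: `emplace_back`, `make_shared` and friends construct with parentheses, and parenthesised aggregate initialisation only arrived in C++20. A self-contained sketch of the same idea (struct name and column names are made up for illustration):

```cpp
#include <string>
#include <vector>

// Without a constructor, emplace_back("sum", ...) fails under C++17
// because an aggregate cannot be initialised with parentheses there.
struct NamedAggregatorSketch {
    NamedAggregatorSketch(std::string op, std::string in, std::string out)
        : aggregation_operator_(std::move(op)),
          input_column_name_(std::move(in)),
          output_column_name_(std::move(out)) {}

    std::string aggregation_operator_;
    std::string input_column_name_;
    std::string output_column_name_;
};

int main() {
    std::vector<NamedAggregatorSketch> aggs;
    aggs.emplace_back("sum", "price", "total_price");  // needs the ctor in C++17
    return aggs.size() == 1 ? 0 : 1;
}
```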
2 changes: 1 addition & 1 deletion cpp/arcticdb/processing/operation_dispatch.cpp
@@ -50,7 +50,7 @@ VariantData transform_to_bitset(const VariantData& data) {
details::visit_type(column_with_strings.column_->type().data_type(), [&column_with_strings, &output_bitset](auto col_tag) {
using type_info = ScalarTypeInfo<decltype(col_tag)>;
if constexpr (is_bool_type(type_info::data_type)) {
Column::transform<typename type_info::TDT>(*column_with_strings.column_, output_bitset, false, [](auto input_value) -> bool {
Column::transform_to_bitset<typename type_info::TDT>(*column_with_strings.column_, output_bitset, false, [](auto input_value) -> bool {
return input_value;
});
} else {
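This call site changes only because the overload was renamed: with the `std::predicate` constraint gone, the bitset-producing variant is distinguished by name rather than by its concept. In C++17 the same intent can still be stated with a `static_assert` on `std::is_invocable_r_v<bool, ...>`; a minimal sketch with hypothetical names, not the project's actual API:

```cpp
#include <type_traits>
#include <vector>

// C++20: template <std::predicate<int> F> ...
// C++17: take `typename F` and assert the requirement explicitly.
template <typename F>
std::vector<bool> transform_to_bitset_sketch(const std::vector<int>& input, F&& pred) {
    static_assert(std::is_invocable_r_v<bool, F, int>,
                  "functor must be callable as bool(int)");
    std::vector<bool> out;
    out.reserve(input.size());
    for (int v : input)
        out.push_back(pred(v));  // one bit per input value
    return out;
}

int main() {
    auto bits = transform_to_bitset_sketch(std::vector<int>{0, 1, 2},
                                           [](int v) { return v > 0; });
    return bits[1] ? 0 : 1;
}
```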
